From 605081dc84696eb31f217dec3c5be86a9ec38fc8 Mon Sep 17 00:00:00 2001
From: Michael Valdron
Date: Mon, 10 Jul 2023 17:54:07 -0400
Subject: [PATCH] Add OpenAPI 3 specification and source generation for REST API (#175)

* index file json schema added.
Signed-off-by: Michael Valdron
* Swagger 2.0 converted to OpenAPI 3.0
Signed-off-by: Michael Valdron
* oapi-codegen added as dependency.
Signed-off-by: Michael Valdron
* Codegen config for server package schema types added.
Signed-off-by: Michael Valdron
* V2 index file schema added.
Signed-off-by: Michael Valdron
* correct operationIds in index server spec
Signed-off-by: Michael Valdron
* endpoint code gen config added.
Signed-off-by: Michael Valdron
* index server dependencies modified.
Signed-off-by: Michael Valdron
* REST API endpoint definitions and bindings generated.
Signed-off-by: Michael Valdron
* Code refactored to use generated source.
Signed-off-by: Michael Valdron
* starter project route added with other openapi schema changes.
Signed-off-by: Michael Valdron
* rename swagger.yaml to openapi.yaml
Signed-off-by: Michael Valdron
* index and download starter project api endpoint signatures refactored.
Signed-off-by: Michael Valdron
* Endpoints added to openapi spec.
Signed-off-by: Michael Valdron
* template header new year updates.
Signed-off-by: Michael Valdron
* source generation config files moved to base directory of index server.
Signed-off-by: Michael Valdron
* imports.tmpl file for source generation header added to index server.
Signed-off-by: Michael Valdron
* typing changes to openapi spec of index server.
Signed-off-by: Michael Valdron
* codegen script added.
Signed-off-by: Michael Valdron
* fix deps from rebase
Signed-off-by: Michael Valdron
* root, viewer, oci server endpoints added to openapi spec with schema fixups
Signed-off-by: Michael Valdron
* new types generated
Signed-off-by: Michael Valdron
* endpoint changes.
Signed-off-by: Michael Valdron
* Set base directory of build registry script, now can be run from any pwd
Signed-off-by: Michael Valdron
* proxy and static endpoints reverted from OpenAPI, OpenAPI validation middleware only applies to OpenAPI routes.
Signed-off-by: Michael Valdron
* correct invalid changes introduced in f991981
Signed-off-by: Michael Valdron
* index schemas OpenAPI specs removed for a future issue
Signed-off-by: Michael Valdron
* codegen happens on build
Signed-off-by: Michael Valdron
* query parameter schema changes.
Signed-off-by: Michael Valdron
* update source to updated schemas.
Signed-off-by: Michael Valdron
* codegen changes.
Signed-off-by: Michael Valdron
* add README documentation on OpenAPI code generation.
Signed-off-by: Michael Valdron --------- Signed-off-by: Michael Valdron --- .github/workflows/ci.yaml | 47 +- .github/workflows/pushimage-next.yaml | 12 +- README.md | 5 + build_registry.sh | 10 +- index/server/README.md | 53 +- index/server/build.sh | 3 + index/server/codegen.sh | 24 + index/server/config/endpoint.yaml | 25 + index/server/config/types.yaml | 25 + index/server/go.mod | 29 +- index/server/go.sum | 79 +- index/server/openapi.yaml | 447 + index/server/pkg/server/endpoint.gen.go | 515 ++ index/server/pkg/server/endpoint.go | 207 +- index/server/pkg/server/endpoint_test.go | 47 +- index/server/pkg/server/index.go | 106 +- index/server/pkg/server/types.gen.go | 110 + index/server/swagger.yaml | 2728 ------ index/server/templates/imports.tmpl | 17 + .../apapsch/go-jsonmerge/v2/.editorconfig | 6 + .../apapsch/go-jsonmerge/v2/.gitattributes | 175 + .../apapsch/go-jsonmerge/v2/.gitignore | 11 + .../apapsch/go-jsonmerge/v2/.gitlab-ci.yml | 42 + .../apapsch/go-jsonmerge/v2/.travis.yml | 19 + .../apapsch/go-jsonmerge/v2/LICENSE | 21 + .../apapsch/go-jsonmerge/v2/README.md | 81 + .../apapsch/go-jsonmerge/v2/build.cmd | 25 + .../apapsch/go-jsonmerge/v2/build.sh | 19 + .../github.com/apapsch/go-jsonmerge/v2/doc.go | 52 + .../apapsch/go-jsonmerge/v2/merge.go | 167 + .../github.com/deepmap/oapi-codegen/LICENSE | 201 + .../pkg/gin-middleware/oapi_validate.go | 196 + .../pkg/gin-middleware/test_spec.yaml | 103 + .../deepmap/oapi-codegen/pkg/runtime/bind.go | 24 + .../oapi-codegen/pkg/runtime/bindform.go | 309 + .../oapi-codegen/pkg/runtime/bindparam.go | 526 ++ .../oapi-codegen/pkg/runtime/bindstring.go | 174 + .../oapi-codegen/pkg/runtime/deepobject.go | 358 + .../oapi-codegen/pkg/runtime/jsonmerge.go | 26 + .../oapi-codegen/pkg/runtime/styleparam.go | 473 + .../deepmap/oapi-codegen/pkg/types/date.go | 43 + .../deepmap/oapi-codegen/pkg/types/email.go | 27 + .../deepmap/oapi-codegen/pkg/types/file.go | 71 + .../deepmap/oapi-codegen/pkg/types/regexes.go | 11 + .../deepmap/oapi-codegen/pkg/types/uuid.go | 7 + .../github.com/getkin/kin-openapi/LICENSE | 21 + .../getkin/kin-openapi/openapi3/callback.go | 48 + .../getkin/kin-openapi/openapi3/components.go | 242 + .../getkin/kin-openapi/openapi3/content.go | 124 + .../kin-openapi/openapi3/discriminator.go | 49 + .../getkin/kin-openapi/openapi3/doc.go | 4 + .../getkin/kin-openapi/openapi3/encoding.go | 136 + .../getkin/kin-openapi/openapi3/errors.go | 59 + .../getkin/kin-openapi/openapi3/example.go | 93 + .../openapi3/example_validation.go | 16 + .../getkin/kin-openapi/openapi3/extension.go | 24 + .../kin-openapi/openapi3/external_docs.go | 61 + .../getkin/kin-openapi/openapi3/header.go | 103 + .../getkin/kin-openapi/openapi3/info.go | 185 + .../kin-openapi/openapi3/internalize_refs.go | 434 + .../getkin/kin-openapi/openapi3/link.go | 101 + .../getkin/kin-openapi/openapi3/loader.go | 1039 +++ .../kin-openapi/openapi3/loader_uri_reader.go | 112 + .../getkin/kin-openapi/openapi3/media_type.go | 167 + .../getkin/kin-openapi/openapi3/openapi3.go | 156 + .../getkin/kin-openapi/openapi3/operation.go | 216 + .../getkin/kin-openapi/openapi3/parameter.go | 418 + .../getkin/kin-openapi/openapi3/path_item.go | 211 + .../getkin/kin-openapi/openapi3/paths.go | 243 + .../getkin/kin-openapi/openapi3/ref.go | 7 + .../getkin/kin-openapi/openapi3/refs.go | 695 ++ .../kin-openapi/openapi3/request_body.go | 146 + .../getkin/kin-openapi/openapi3/response.go | 183 + .../getkin/kin-openapi/openapi3/schema.go | 2121 +++++ 
.../kin-openapi/openapi3/schema_formats.go | 106 + .../openapi3/schema_validation_settings.go | 79 + .../openapi3/security_requirements.go | 51 + .../kin-openapi/openapi3/security_scheme.go | 412 + .../openapi3/serialization_method.go | 17 + .../getkin/kin-openapi/openapi3/server.go | 278 + .../getkin/kin-openapi/openapi3/tag.go | 87 + .../openapi3/testdata/circularRef/base.yml | 16 + .../openapi3/testdata/circularRef/other.yml | 10 + .../testdata/recursiveRef/components/Bar.yml | 2 + .../testdata/recursiveRef/components/Cat.yml | 4 + .../testdata/recursiveRef/components/Foo.yml | 4 + .../recursiveRef/components/Foo/Foo2.yml | 4 + .../recursiveRef/components/models/error.yaml | 2 + .../testdata/recursiveRef/issue615.yml | 60 + .../testdata/recursiveRef/openapi.yml | 33 + .../recursiveRef/openapi.yml.internalized.yml | 110 + .../recursiveRef/parameters/number.yml | 4 + .../testdata/recursiveRef/paths/foo.yml | 15 + .../openapi3/validation_options.go | 112 + .../getkin/kin-openapi/openapi3/visited.go | 41 + .../getkin/kin-openapi/openapi3/xml.go | 66 + .../openapi3filter/authentication_input.go | 29 + .../kin-openapi/openapi3filter/errors.go | 92 + .../kin-openapi/openapi3filter/internal.go | 25 + .../kin-openapi/openapi3filter/middleware.go | 283 + .../kin-openapi/openapi3filter/options.go | 44 + .../openapi3filter/req_resp_decoder.go | 1295 +++ .../openapi3filter/req_resp_encoder.go | 49 + .../openapi3filter/validate_request.go | 398 + .../openapi3filter/validate_request_input.go | 38 + .../openapi3filter/validate_response.go | 224 + .../openapi3filter/validate_response_input.go | 42 + .../openapi3filter/validation_error.go | 85 + .../validation_error_encoder.go | 186 + .../openapi3filter/validation_handler.go | 107 + .../openapi3filter/validation_kit.go | 85 + .../kin-openapi/routers/gorillamux/router.go | 263 + .../routers/legacy/pathpattern/node.go | 328 + .../kin-openapi/routers/legacy/router.go | 167 + .../getkin/kin-openapi/routers/types.go | 42 + .../github.com/gin-gonic/gin/.golangci.yml | 39 + .../github.com/gin-gonic/gin/.goreleaser.yaml | 57 + .../github.com/gin-gonic/gin/.travis.yml | 48 - .../github.com/gin-gonic/gin/AUTHORS.md | 617 +- .../github.com/gin-gonic/gin/CHANGELOG.md | 48 + .../github.com/gin-gonic/gin/CONTRIBUTING.md | 2 +- .../vendor/github.com/gin-gonic/gin/Makefile | 10 +- .../vendor/github.com/gin-gonic/gin/README.md | 186 +- .../vendor/github.com/gin-gonic/gin/any.go | 10 + .../vendor/github.com/gin-gonic/gin/auth.go | 4 +- .../github.com/gin-gonic/gin/binding/any.go | 10 + .../gin-gonic/gin/binding/binding.go | 24 +- .../gin/binding/binding_nomsgpack.go | 22 +- .../gin/binding/default_validator.go | 40 +- .../github.com/gin-gonic/gin/binding/form.go | 15 +- .../gin-gonic/gin/binding/form_mapping.go | 59 +- .../gin-gonic/gin/binding/header.go | 10 +- .../github.com/gin-gonic/gin/binding/json.go | 12 +- .../gin-gonic/gin/binding/msgpack.go | 8 +- .../gin/binding/multipart_form_mapping.go | 32 +- .../gin-gonic/gin/binding/protobuf.go | 15 +- .../github.com/gin-gonic/gin/binding/query.go | 4 +- .../github.com/gin-gonic/gin/binding/toml.go | 35 + .../github.com/gin-gonic/gin/binding/uri.go | 6 +- .../github.com/gin-gonic/gin/binding/xml.go | 8 +- .../github.com/gin-gonic/gin/binding/yaml.go | 8 +- .../github.com/gin-gonic/gin/context.go | 316 +- .../gin-gonic/gin/context_appengine.go | 2 +- 
.../vendor/github.com/gin-gonic/gin/debug.go | 14 +- .../github.com/gin-gonic/gin/deprecated.go | 4 +- .../vendor/github.com/gin-gonic/gin/errors.go | 14 +- .../vendor/github.com/gin-gonic/gin/fs.go | 4 +- .../vendor/github.com/gin-gonic/gin/gin.go | 129 +- .../gin-gonic/gin/internal/json/go_json.go | 23 + .../gin-gonic/gin/internal/json/json.go | 6 +- .../gin-gonic/gin/internal/json/jsoniter.go | 2 +- .../vendor/github.com/gin-gonic/gin/logger.go | 17 +- .../vendor/github.com/gin-gonic/gin/mode.go | 17 +- .../github.com/gin-gonic/gin/recovery.go | 18 +- .../github.com/gin-gonic/gin/render/any.go | 10 + .../github.com/gin-gonic/gin/render/data.go | 2 +- .../github.com/gin-gonic/gin/render/html.go | 10 +- .../github.com/gin-gonic/gin/render/json.go | 44 +- .../gin-gonic/gin/render/msgpack.go | 8 +- .../gin-gonic/gin/render/protobuf.go | 6 +- .../github.com/gin-gonic/gin/render/reader.go | 2 +- .../gin-gonic/gin/render/redirect.go | 2 +- .../github.com/gin-gonic/gin/render/render.go | 3 +- .../github.com/gin-gonic/gin/render/text.go | 6 +- .../github.com/gin-gonic/gin/render/toml.go | 36 + .../github.com/gin-gonic/gin/render/xml.go | 4 +- .../github.com/gin-gonic/gin/render/yaml.go | 4 +- .../gin-gonic/gin/response_writer.go | 18 +- .../github.com/gin-gonic/gin/routergroup.go | 52 +- .../github.com/gin-gonic/gin/test_helpers.go | 2 +- .../vendor/github.com/gin-gonic/gin/tree.go | 22 +- .../vendor/github.com/gin-gonic/gin/utils.go | 21 +- .../github.com/gin-gonic/gin/version.go | 4 +- .../github.com/go-openapi/swag/.gitattributes | 2 + .../github.com/go-openapi/swag/.golangci.yml | 11 + .../github.com/go-openapi/swag/.travis.yml | 37 - .../vendor/github.com/go-openapi/swag/file.go | 33 + .../github.com/go-openapi/swag/post_go18.go | 1 + .../github.com/go-openapi/swag/post_go19.go | 1 + .../github.com/go-openapi/swag/pre_go18.go | 1 + .../github.com/go-openapi/swag/pre_go19.go | 1 + .../go-playground/locales/README.md | 4 +- .../locales/currency/currency.go | 3 + .../universal-translator/Makefile | 18 + .../universal-translator/README.md | 6 +- .../universal-translator/import_export.go | 2 + .../universal-translator/translator.go | 12 +- .../go-playground/validator/v10/.travis.yml | 29 - .../validator/v10/MAINTAINERS.md | 16 + .../go-playground/validator/v10/Makefile | 4 +- .../go-playground/validator/v10/README.md | 61 +- .../go-playground/validator/v10/baked_in.go | 997 +- .../go-playground/validator/v10/cache.go | 9 +- .../validator/v10/country_codes.go | 970 ++ .../validator/v10/currency_codes.go | 79 + .../go-playground/validator/v10/doc.go | 95 +- .../go-playground/validator/v10/errors.go | 22 +- .../validator/v10/field_level.go | 23 +- .../validator/v10/postcode_regexes.go | 173 + .../go-playground/validator/v10/regexes.go | 50 +- .../validator/v10/struct_level.go | 12 +- .../go-playground/validator/v10/util.go | 2 +- .../go-playground/validator/v10/validator.go | 17 +- .../validator/v10/validator_instance.go | 96 +- .../github.com/goccy/go-json/.codecov.yml | 32 + .../github.com/goccy/go-json/.gitignore | 2 + .../github.com/goccy/go-json/.golangci.yml | 83 + .../github.com/goccy/go-json/CHANGELOG.md | 393 + .../vendor/github.com/goccy/go-json/LICENSE | 21 + 
.../vendor/github.com/goccy/go-json/Makefile | 39 + .../vendor/github.com/goccy/go-json/README.md | 529 ++ .../vendor/github.com/goccy/go-json/color.go | 68 + .../vendor/github.com/goccy/go-json/decode.go | 232 + .../goccy/go-json/docker-compose.yml | 13 + .../vendor/github.com/goccy/go-json/encode.go | 326 + .../vendor/github.com/goccy/go-json/error.go | 39 + .../internal/decoder/anonymous_field.go | 37 + .../goccy/go-json/internal/decoder/array.go | 169 + .../goccy/go-json/internal/decoder/bool.go | 78 + .../goccy/go-json/internal/decoder/bytes.go | 113 + .../goccy/go-json/internal/decoder/compile.go | 487 + .../internal/decoder/compile_norace.go | 29 + .../go-json/internal/decoder/compile_race.go | 37 + .../goccy/go-json/internal/decoder/context.go | 254 + .../goccy/go-json/internal/decoder/float.go | 158 + .../goccy/go-json/internal/decoder/func.go | 141 + .../goccy/go-json/internal/decoder/int.go | 242 + .../go-json/internal/decoder/interface.go | 458 + .../goccy/go-json/internal/decoder/invalid.go | 45 + .../goccy/go-json/internal/decoder/map.go | 187 + .../goccy/go-json/internal/decoder/number.go | 112 + .../goccy/go-json/internal/decoder/option.go | 15 + .../goccy/go-json/internal/decoder/ptr.go | 87 + .../goccy/go-json/internal/decoder/slice.go | 301 + .../goccy/go-json/internal/decoder/stream.go | 556 ++ .../goccy/go-json/internal/decoder/string.go | 441 + .../goccy/go-json/internal/decoder/struct.go | 819 ++ .../goccy/go-json/internal/decoder/type.go | 29 + .../goccy/go-json/internal/decoder/uint.go | 190 + .../internal/decoder/unmarshal_json.go | 99 + .../internal/decoder/unmarshal_text.go | 280 + .../internal/decoder/wrapped_string.go | 68 + .../goccy/go-json/internal/encoder/code.go | 1017 ++ .../goccy/go-json/internal/encoder/compact.go | 286 + .../go-json/internal/encoder/compiler.go | 930 ++ .../internal/encoder/compiler_norace.go | 32 + .../go-json/internal/encoder/compiler_race.go | 45 + .../goccy/go-json/internal/encoder/context.go | 105 + .../go-json/internal/encoder/decode_rune.go | 126 + .../goccy/go-json/internal/encoder/encoder.go | 596 ++ .../goccy/go-json/internal/encoder/indent.go | 211 + .../goccy/go-json/internal/encoder/int.go | 152 + .../goccy/go-json/internal/encoder/map112.go | 9 + .../goccy/go-json/internal/encoder/map113.go | 9 + .../goccy/go-json/internal/encoder/opcode.go | 669 ++ .../goccy/go-json/internal/encoder/option.go | 47 + .../goccy/go-json/internal/encoder/optype.go | 932 ++ .../goccy/go-json/internal/encoder/query.go | 135 + .../goccy/go-json/internal/encoder/string.go | 459 + .../go-json/internal/encoder/string_table.go | 415 + .../go-json/internal/encoder/vm/debug_vm.go | 35 + .../goccy/go-json/internal/encoder/vm/hack.go | 9 + .../goccy/go-json/internal/encoder/vm/util.go | 207 + .../goccy/go-json/internal/encoder/vm/vm.go | 4859 ++++++++++ .../internal/encoder/vm_color/debug_vm.go | 35 + .../go-json/internal/encoder/vm_color/hack.go | 9 + .../go-json/internal/encoder/vm_color/util.go | 274 + .../go-json/internal/encoder/vm_color/vm.go | 4859 ++++++++++ .../encoder/vm_color_indent/debug_vm.go | 35 + .../internal/encoder/vm_color_indent/util.go | 296 + .../internal/encoder/vm_color_indent/vm.go | 4859 ++++++++++ .../internal/encoder/vm_indent/debug_vm.go | 35 + .../internal/encoder/vm_indent/hack.go | 9 + .../internal/encoder/vm_indent/util.go | 229 + .../go-json/internal/encoder/vm_indent/vm.go | 4859 ++++++++++ .../goccy/go-json/internal/errors/error.go | 164 + 
.../goccy/go-json/internal/runtime/rtype.go | 263 + .../go-json/internal/runtime/struct_field.go | 91 + .../goccy/go-json/internal/runtime/type.go | 100 + .../vendor/github.com/goccy/go-json/json.go | 371 + .../vendor/github.com/goccy/go-json/option.go | 72 + .../vendor/github.com/goccy/go-json/query.go | 47 + .../vendor/github.com/google/uuid/.travis.yml | 9 + .../github.com/google/uuid/CONTRIBUTING.md | 10 + .../github.com/google/uuid/CONTRIBUTORS | 9 + .../vendor/github.com/google/uuid/LICENSE | 27 + .../vendor/github.com/google/uuid/README.md | 19 + .../vendor/github.com/google/uuid/dce.go | 80 + .../vendor/github.com/google/uuid/doc.go | 12 + .../vendor/github.com/google/uuid/hash.go | 53 + .../vendor/github.com/google/uuid/marshal.go | 38 + .../vendor/github.com/google/uuid/node.go | 90 + .../vendor/github.com/google/uuid/node_js.go | 12 + .../vendor/github.com/google/uuid/node_net.go | 33 + .../vendor/github.com/google/uuid/null.go | 118 + .../vendor/github.com/google/uuid/sql.go | 59 + .../vendor/github.com/google/uuid/time.go | 123 + .../vendor/github.com/google/uuid/util.go | 43 + .../vendor/github.com/google/uuid/uuid.go | 294 + .../vendor/github.com/google/uuid/version1.go | 44 + .../vendor/github.com/google/uuid/version4.go | 76 + .../vendor/github.com/invopop/yaml/.gitignore | 20 + .../github.com/invopop/yaml/.golangci.toml | 15 + .../vendor/github.com/invopop/yaml/LICENSE | 50 + .../vendor/github.com/invopop/yaml/README.md | 128 + .../vendor/github.com/invopop/yaml/fields.go | 498 + .../vendor/github.com/invopop/yaml/yaml.go | 309 + .../github.com/leodido/go-urn/.travis.yml | 6 +- .../vendor/github.com/leodido/go-urn/urn.go | 23 + .../mailru/easyjson/jlexer/lexer.go | 14 + .../github.com/mattn/go-colorable/.travis.yml | 9 - .../github.com/mattn/go-colorable/README.md | 6 +- .../mattn/go-colorable/colorable_appengine.go | 15 +- .../mattn/go-colorable/colorable_others.go | 18 +- .../mattn/go-colorable/colorable_windows.go | 113 +- .../github.com/mattn/go-colorable/go.test.sh | 12 + .../mattn/go-colorable/noncolorable.go | 18 +- .../github.com/mattn/go-isatty/.travis.yml | 14 - .../github.com/mattn/go-isatty/isatty_bsd.go | 1 + .../mattn/go-isatty/isatty_others.go | 3 +- .../mattn/go-isatty/isatty_plan9.go | 1 + .../mattn/go-isatty/isatty_solaris.go | 9 +- .../mattn/go-isatty/isatty_tcgets.go | 3 +- .../mattn/go-isatty/isatty_windows.go | 6 +- .../github.com/mattn/go-isatty/renovate.json | 8 - .../github.com/mohae/deepcopy/.gitignore | 26 + .../github.com/mohae/deepcopy/.travis.yml | 11 + .../vendor/github.com/mohae/deepcopy/LICENSE | 21 + .../github.com/mohae/deepcopy/README.md | 8 + .../github.com/mohae/deepcopy/deepcopy.go | 125 + .../pelletier/go-toml/v2/.dockerignore | 2 + .../pelletier/go-toml/v2/.gitattributes | 4 + .../pelletier/go-toml/v2/.gitignore | 6 + .../pelletier/go-toml/v2/.golangci.toml | 84 + .../pelletier/go-toml/v2/.goreleaser.yaml | 123 + .../pelletier/go-toml/v2/CONTRIBUTING.md | 196 + .../pelletier/go-toml/v2/Dockerfile | 5 + .../github.com/pelletier/go-toml/v2/LICENSE | 21 + .../github.com/pelletier/go-toml/v2/README.md | 552 ++ .../pelletier/go-toml/v2/SECURITY.md | 19 + 
.../github.com/pelletier/go-toml/v2/ci.sh | 279 + .../github.com/pelletier/go-toml/v2/decode.go | 544 ++ .../github.com/pelletier/go-toml/v2/doc.go | 2 + .../github.com/pelletier/go-toml/v2/errors.go | 270 + .../pelletier/go-toml/v2/internal/ast/ast.go | 144 + .../go-toml/v2/internal/ast/builder.go | 51 + .../pelletier/go-toml/v2/internal/ast/kind.go | 69 + .../go-toml/v2/internal/danger/danger.go | 65 + .../go-toml/v2/internal/danger/typeid.go | 23 + .../go-toml/v2/internal/tracker/key.go | 50 + .../go-toml/v2/internal/tracker/seen.go | 356 + .../go-toml/v2/internal/tracker/tracker.go | 1 + .../pelletier/go-toml/v2/localtime.go | 120 + .../pelletier/go-toml/v2/marshaler.go | 1040 +++ .../github.com/pelletier/go-toml/v2/parser.go | 1086 +++ .../pelletier/go-toml/v2/scanner.go | 269 + .../github.com/pelletier/go-toml/v2/strict.go | 107 + .../github.com/pelletier/go-toml/v2/toml.abnf | 243 + .../github.com/pelletier/go-toml/v2/types.go | 14 + .../pelletier/go-toml/v2/unmarshaler.go | 1227 +++ .../github.com/pelletier/go-toml/v2/utf8.go | 240 + .../perimeterx/marshmallow/.gitignore | 4 + .../github.com/perimeterx/marshmallow/LICENSE | 21 + .../perimeterx/marshmallow/README.md | 196 + .../perimeterx/marshmallow/cache.go | 63 + .../github.com/perimeterx/marshmallow/doc.go | 10 + .../perimeterx/marshmallow/errors.go | 101 + .../perimeterx/marshmallow/options.go | 77 + .../perimeterx/marshmallow/reflection.go | 197 + .../perimeterx/marshmallow/unmarshal.go | 378 + .../marshmallow/unmarshal_from_json_map.go | 290 + .../vendor/github.com/ugorji/go/codec/LICENSE | 2 +- .../github.com/ugorji/go/codec/README.md | 283 + .../vendor/github.com/ugorji/go/codec/binc.go | 798 +- .../github.com/ugorji/go/codec/build.sh | 204 +- .../vendor/github.com/ugorji/go/codec/cbor.go | 510 +- .../github.com/ugorji/go/codec/codecgen.go | 10 +- .../github.com/ugorji/go/codec/decimal.go | 499 + .../github.com/ugorji/go/codec/decode.go | 1740 ++-- .../vendor/github.com/ugorji/go/codec/doc.go | 16 +- .../github.com/ugorji/go/codec/encode.go | 700 +- .../ugorji/go/codec/fast-path.generated.go | 8175 ++++++----------- .../ugorji/go/codec/fast-path.go.tmpl | 234 +- .../ugorji/go/codec/fast-path.not.go | 24 +- .../github.com/ugorji/go/codec/float.go | 313 - .../ugorji/go/codec/gen-dec-array.go.tmpl | 2 +- .../ugorji/go/codec/gen-dec-map.go.tmpl | 23 +- .../ugorji/go/codec/gen-helper.generated.go | 108 +- .../ugorji/go/codec/gen-helper.go.tmpl | 98 +- .../ugorji/go/codec/gen.generated.go | 27 +- .../vendor/github.com/ugorji/go/codec/gen.go | 1368 ++- .../go/codec/goversion_arrayof_gte_go15.go | 3 +- .../go/codec/goversion_arrayof_lt_go15.go | 12 +- .../go/codec/goversion_fmt_time_gte_go15.go | 7 +- .../go/codec/goversion_fmt_time_lt_go15.go | 7 +- ..._go19.go => goversion_makemap_lt_go110.go} | 5 +- ...goversion_makemap_not_unsafe_gte_go110.go} | 9 +- .../goversion_makemap_unsafe_gte_go110.go | 25 + .../go/codec/goversion_maprange_gte_go112.go | 9 +- .../go/codec/goversion_maprange_lt_go112.go | 10 +- ...version_unexportedembeddedptr_gte_go110.go | 3 +- ...oversion_unexportedembeddedptr_lt_go110.go | 3 +- .../go/codec/goversion_unsupported_lt_go14.go | 9 +- .../go/codec/goversion_vendor_eq_go15.go | 3 +- .../go/codec/goversion_vendor_eq_go16.go | 3 +- .../go/codec/goversion_vendor_gte_go17.go | 3 +- .../go/codec/goversion_vendor_lt_go15.go | 3 +- 
.../github.com/ugorji/go/codec/helper.go | 2545 ++--- .../ugorji/go/codec/helper_internal.go | 147 +- .../ugorji/go/codec/helper_not_unsafe.go | 403 +- .../go/codec/helper_not_unsafe_not_gc.go | 19 + .../ugorji/go/codec/helper_unsafe.go | 959 +- .../go/codec/helper_unsafe_compiler_gc.go | 171 + .../go/codec/helper_unsafe_compiler_not_gc.go | 80 + .../vendor/github.com/ugorji/go/codec/json.go | 1114 ++- .../ugorji/go/codec/mammoth-test.go.tmpl | 95 +- .../ugorji/go/codec/mammoth2-test.go.tmpl | 49 +- .../github.com/ugorji/go/codec/msgpack.go | 556 +- .../github.com/ugorji/go/codec/prebuild.go | 136 - .../github.com/ugorji/go/codec/reader.go | 865 +- .../ugorji/go/codec/register_ext.go | 2 +- .../vendor/github.com/ugorji/go/codec/rpc.go | 132 +- .../github.com/ugorji/go/codec/simple.go | 367 +- .../ugorji/go/codec/sort-slice.generated.go | 136 +- .../ugorji/go/codec/sort-slice.go.tmpl | 4 +- .../vendor/github.com/ugorji/go/codec/test.py | 50 +- .../github.com/ugorji/go/codec/writer.go | 94 +- .../vendor/golang.org/x/net/http2/h2c/h2c.go | 240 + index/server/vendor/modules.txt | 74 +- 429 files changed, 79251 insertions(+), 15827 deletions(-) create mode 100644 index/server/codegen.sh create mode 100644 index/server/config/endpoint.yaml create mode 100644 index/server/config/types.yaml create mode 100644 index/server/openapi.yaml create mode 100644 index/server/pkg/server/endpoint.gen.go create mode 100644 index/server/pkg/server/types.gen.go delete mode 100644 index/server/swagger.yaml create mode 100644 index/server/templates/imports.tmpl create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.editorconfig create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitattributes create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitignore create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitlab-ci.yml create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.travis.yml create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/LICENSE create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/README.md create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.cmd create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.sh create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/doc.go create mode 100644 index/server/vendor/github.com/apapsch/go-jsonmerge/v2/merge.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/LICENSE create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/oapi_validate.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/test_spec.yaml create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindform.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go create mode 100644 
index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/jsonmerge.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/file.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go create mode 100644 index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/uuid.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/LICENSE create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/callback.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/components.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/content.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/doc.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/errors.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/example.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/extension.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/header.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/info.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/link.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/operation.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/paths.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/ref.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/refs.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/response.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema.go create mode 100644 
index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/server.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/tag.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/visited.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3/xml.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/authentication_input.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/errors.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/internal.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/middleware.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/options.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_decoder.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_encoder.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request_input.go create mode 100644 
index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response_input.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error_encoder.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_handler.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_kit.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/routers/gorillamux/router.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/pathpattern/node.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/router.go create mode 100644 index/server/vendor/github.com/getkin/kin-openapi/routers/types.go create mode 100644 index/server/vendor/github.com/gin-gonic/gin/.golangci.yml create mode 100644 index/server/vendor/github.com/gin-gonic/gin/.goreleaser.yaml delete mode 100644 index/server/vendor/github.com/gin-gonic/gin/.travis.yml create mode 100644 index/server/vendor/github.com/gin-gonic/gin/any.go create mode 100644 index/server/vendor/github.com/gin-gonic/gin/binding/any.go create mode 100644 index/server/vendor/github.com/gin-gonic/gin/binding/toml.go create mode 100644 index/server/vendor/github.com/gin-gonic/gin/internal/json/go_json.go create mode 100644 index/server/vendor/github.com/gin-gonic/gin/render/any.go create mode 100644 index/server/vendor/github.com/gin-gonic/gin/render/toml.go create mode 100644 index/server/vendor/github.com/go-openapi/swag/.gitattributes delete mode 100644 index/server/vendor/github.com/go-openapi/swag/.travis.yml create mode 100644 index/server/vendor/github.com/go-openapi/swag/file.go create mode 100644 index/server/vendor/github.com/go-playground/universal-translator/Makefile delete mode 100644 index/server/vendor/github.com/go-playground/validator/v10/.travis.yml create mode 100644 index/server/vendor/github.com/go-playground/validator/v10/MAINTAINERS.md create mode 100644 index/server/vendor/github.com/go-playground/validator/v10/currency_codes.go create mode 100644 index/server/vendor/github.com/go-playground/validator/v10/postcode_regexes.go create mode 100644 index/server/vendor/github.com/goccy/go-json/.codecov.yml create mode 100644 index/server/vendor/github.com/goccy/go-json/.gitignore create mode 100644 index/server/vendor/github.com/goccy/go-json/.golangci.yml create mode 100644 index/server/vendor/github.com/goccy/go-json/CHANGELOG.md create mode 100644 index/server/vendor/github.com/goccy/go-json/LICENSE create mode 100644 index/server/vendor/github.com/goccy/go-json/Makefile create mode 100644 index/server/vendor/github.com/goccy/go-json/README.md create mode 100644 index/server/vendor/github.com/goccy/go-json/color.go create mode 100644 index/server/vendor/github.com/goccy/go-json/decode.go create mode 100644 index/server/vendor/github.com/goccy/go-json/docker-compose.yml create mode 100644 index/server/vendor/github.com/goccy/go-json/encode.go 
create mode 100644 index/server/vendor/github.com/goccy/go-json/error.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/array.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/bool.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/bytes.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/compile.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_norace.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_race.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/context.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/float.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/func.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/int.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/interface.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/invalid.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/map.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/number.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/option.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/ptr.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/slice.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/stream.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/string.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/struct.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/type.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/uint.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_json.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_text.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/decoder/wrapped_string.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/code.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/compact.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_norace.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_race.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/context.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/decode_rune.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/encoder.go create mode 100644 
index/server/vendor/github.com/goccy/go-json/internal/encoder/indent.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/int.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/map112.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/map113.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/opcode.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/option.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/optype.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/query.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/string.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/string_table.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/debug_vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/hack.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/util.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/debug_vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/hack.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/util.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/debug_vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/util.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/debug_vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/hack.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/util.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/vm.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/errors/error.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/runtime/rtype.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/runtime/struct_field.go create mode 100644 index/server/vendor/github.com/goccy/go-json/internal/runtime/type.go create mode 100644 index/server/vendor/github.com/goccy/go-json/json.go create mode 100644 index/server/vendor/github.com/goccy/go-json/option.go create mode 100644 index/server/vendor/github.com/goccy/go-json/query.go create mode 100644 index/server/vendor/github.com/google/uuid/.travis.yml create mode 100644 index/server/vendor/github.com/google/uuid/CONTRIBUTING.md create mode 100644 index/server/vendor/github.com/google/uuid/CONTRIBUTORS create mode 100644 index/server/vendor/github.com/google/uuid/LICENSE create mode 
100644 index/server/vendor/github.com/google/uuid/README.md create mode 100644 index/server/vendor/github.com/google/uuid/dce.go create mode 100644 index/server/vendor/github.com/google/uuid/doc.go create mode 100644 index/server/vendor/github.com/google/uuid/hash.go create mode 100644 index/server/vendor/github.com/google/uuid/marshal.go create mode 100644 index/server/vendor/github.com/google/uuid/node.go create mode 100644 index/server/vendor/github.com/google/uuid/node_js.go create mode 100644 index/server/vendor/github.com/google/uuid/node_net.go create mode 100644 index/server/vendor/github.com/google/uuid/null.go create mode 100644 index/server/vendor/github.com/google/uuid/sql.go create mode 100644 index/server/vendor/github.com/google/uuid/time.go create mode 100644 index/server/vendor/github.com/google/uuid/util.go create mode 100644 index/server/vendor/github.com/google/uuid/uuid.go create mode 100644 index/server/vendor/github.com/google/uuid/version1.go create mode 100644 index/server/vendor/github.com/google/uuid/version4.go create mode 100644 index/server/vendor/github.com/invopop/yaml/.gitignore create mode 100644 index/server/vendor/github.com/invopop/yaml/.golangci.toml create mode 100644 index/server/vendor/github.com/invopop/yaml/LICENSE create mode 100644 index/server/vendor/github.com/invopop/yaml/README.md create mode 100644 index/server/vendor/github.com/invopop/yaml/fields.go create mode 100644 index/server/vendor/github.com/invopop/yaml/yaml.go delete mode 100644 index/server/vendor/github.com/mattn/go-colorable/.travis.yml create mode 100644 index/server/vendor/github.com/mattn/go-colorable/go.test.sh delete mode 100644 index/server/vendor/github.com/mattn/go-isatty/.travis.yml delete mode 100644 index/server/vendor/github.com/mattn/go-isatty/renovate.json create mode 100644 index/server/vendor/github.com/mohae/deepcopy/.gitignore create mode 100644 index/server/vendor/github.com/mohae/deepcopy/.travis.yml create mode 100644 index/server/vendor/github.com/mohae/deepcopy/LICENSE create mode 100644 index/server/vendor/github.com/mohae/deepcopy/README.md create mode 100644 index/server/vendor/github.com/mohae/deepcopy/deepcopy.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/.dockerignore create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/.gitattributes create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/.gitignore create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/.golangci.toml create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/Dockerfile create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/LICENSE create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/README.md create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/SECURITY.md create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/ci.sh create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/decode.go create mode 100644 
index/server/vendor/github.com/pelletier/go-toml/v2/doc.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/errors.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/ast/ast.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/ast/builder.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/ast/kind.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/localtime.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/marshaler.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/parser.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/scanner.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/strict.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/toml.abnf create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/types.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go create mode 100644 index/server/vendor/github.com/pelletier/go-toml/v2/utf8.go create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/.gitignore create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/LICENSE create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/README.md create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/cache.go create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/doc.go create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/errors.go create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/options.go create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/reflection.go create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/unmarshal.go create mode 100644 index/server/vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go create mode 100644 index/server/vendor/github.com/ugorji/go/codec/README.md create mode 100644 index/server/vendor/github.com/ugorji/go/codec/decimal.go delete mode 100644 index/server/vendor/github.com/ugorji/go/codec/float.go rename index/server/vendor/github.com/ugorji/go/codec/{goversion_makemap_lt_go19.go => goversion_makemap_lt_go110.go} (67%) rename index/server/vendor/github.com/ugorji/go/codec/{goversion_makemap_gte_go19.go => goversion_makemap_not_unsafe_gte_go110.go} (56%) create mode 100644 index/server/vendor/github.com/ugorji/go/codec/goversion_makemap_unsafe_gte_go110.go create mode 100644 index/server/vendor/github.com/ugorji/go/codec/helper_not_unsafe_not_gc.go create mode 100644 
index/server/vendor/github.com/ugorji/go/codec/helper_unsafe_compiler_gc.go create mode 100644 index/server/vendor/github.com/ugorji/go/codec/helper_unsafe_compiler_not_gc.go delete mode 100644 index/server/vendor/github.com/ugorji/go/codec/prebuild.go create mode 100644 index/server/vendor/golang.org/x/net/http2/h2c/h2c.go

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index b020f094..f87e3c1c 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,3 +1,17 @@
+#
+# Copyright 2020-2023 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 name: CI
 
 on:
@@ -28,6 +42,19 @@ jobs:
       - name: Check if registry-library build is working
         run: cd registry-library && bash ./build.sh
 
+      - name: Check index-server code generation
+        run: |
+          cd index/server
+          export GOPATH=$(go env GOPATH)
+          go install github.com/deepmap/oapi-codegen/cmd/oapi-codegen@v1.12.4
+          bash codegen.sh
+          GEN_DIFFS=$(git diff --name-only --diff-filter=ACMRT | grep .gen.go$ | xargs)
+          if [[ ! -z "${GEN_DIFFS}" ]]
+          then
+            echo "generated source does not match current changes "
+            exit 1
+          fi
+
       - name: Run Gosec Security Scanner
         run: |
           export PATH=$PATH:$(go env GOPATH)/bin
@@ -53,6 +80,11 @@ jobs:
       - name: Check out code
         uses: actions/checkout@v2
 
+      - name: Setup Go environment
+        uses: actions/setup-go@v2
+        with:
+          go-version: 1.18
+
       - name: Check license
         run: |
           export PATH=$PATH:$(go env GOPATH)/bin
@@ -67,7 +99,11 @@
           fi
 
       - name: Check if index server build is working
-        run: cd index/server && bash ./build.sh
+        run: |
+          cd index/server
+          export GOPATH=$(go env GOPATH)
+          go install github.com/deepmap/oapi-codegen/cmd/oapi-codegen@v1.12.4
+          bash ./build.sh
 
       - name: Test index server
         run: cd index/server && go test ./... -coverprofile cover.out
@@ -91,7 +127,7 @@
       - name: Setup Go environment
         uses: actions/setup-go@v2
         with:
-          go-version: 1.17
+          go-version: 1.18
       - name: Setup Minikube
         uses: manusa/actions-setup-minikube@v2.7.1
         with:
           github token: ${{ secrets.GITHUB_TOKEN }}
           start args: '--addons=ingress'
       - name: Run the devfile registry integration tests
-        run: bash .ci/run_tests_minikube_linux.sh
+        run: |
+          export GOPATH=$(go env GOPATH)
+          go install github.com/deepmap/oapi-codegen/cmd/oapi-codegen@v1.12.4
+          bash .ci/run_tests_minikube_linux.sh
 
   test_staging:
     name: Test Staging Devfile Registry
@@ -112,7 +151,7 @@
       - name: Setup Go environment
         uses: actions/setup-go@v2
         with:
-          go-version: 1.17
+          go-version: 1.18
       - name: Run the devfile registry integration tests
         run: |
           # Run the integration tests
diff --git a/.github/workflows/pushimage-next.yaml b/.github/workflows/pushimage-next.yaml
index 30c12c42..648f414e 100644
--- a/.github/workflows/pushimage-next.yaml
+++ b/.github/workflows/pushimage-next.yaml
@@ -1,5 +1,5 @@
 #
-# Copyright 2020-2022 Red Hat, Inc.
+# Copyright 2020-2023 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -27,6 +27,10 @@ jobs:
     steps:
       - name: Check out registry support source code
         uses: actions/checkout@v2
+      - name: Setup Go environment
+        uses: actions/setup-go@v2
+        with:
+          go-version: 1.18
       - name: Login to Quay
         uses: docker/login-action@v1
         with:
@@ -34,7 +38,11 @@
           username: ${{ secrets.QUAY_USERNAME }}
           password: ${{ secrets.QUAY_PASSWORD }}
       - name: Build the index server base image
-        run: cd index/server && bash ./build.sh
+        run: |
+          cd index/server
+          export GOPATH=$(go env GOPATH)
+          go install github.com/deepmap/oapi-codegen/cmd/oapi-codegen@v1.12.4
+          bash ./build.sh
       - name: Push the index server base image
         run: cd index/server && bash ./push.sh quay.io/devfile/devfile-index-base:next
diff --git a/README.md b/README.md
index 547c32d7..f8f37c8a 100644
--- a/README.md
+++ b/README.md
@@ -28,6 +28,11 @@ followed by
 docker push <registry>/<username>/devfile-index:latest
 ```
 
+See the following for more on the component specific build process:
+
+- [Building the OCI Registry](oci-registry/README.md#build)
+- [Building the Index Server](index/server/README.md#build)
+
 ## Deploy
 
 ### Via the Devfile Registry Operator
diff --git a/build_registry.sh b/build_registry.sh
index e8dd7895..f4285810 100755
--- a/build_registry.sh
+++ b/build_registry.sh
@@ -5,11 +5,15 @@
 # and want to test all of the components together
 shopt -s expand_aliases
 set -eux
+
+# Set base registry support directory
+BASE_DIR=$(dirname $0)
+
 #set the docker alias if necessary
-. ./setenv.sh
+. ${BASE_DIR}/setenv.sh
 
 # Build the index server base image
-. ./index/server/build.sh
+. ${BASE_DIR}/index/server/build.sh
 
 # Build the test devfile registry image
-docker build -t devfile-index:latest -f .ci/Dockerfile .
+docker build -t devfile-index:latest -f ${BASE_DIR}/.ci/Dockerfile ${BASE_DIR}
diff --git a/index/server/README.md b/index/server/README.md
index 61fb2e97..e678e6d1 100644
--- a/index/server/README.md
+++ b/index/server/README.md
@@ -6,12 +6,42 @@ Provides REST API support for devfile registries and serves [devfile registry vi
 
 For more information on REST API docs: [registry-REST-API.adoc](registry-REST-API.adoc)
 
+## Defining Endpoints
+
+Edit the OpenAPI spec `openapi.yaml`; under `paths` you can define your endpoint, e.g. `GET /foo`:
+
+```yaml
+paths:
+  /foo:
+    get:
+      summary:
+      description:
+      # 'serveFoo' points to handler function 'ServeFoo'
+      operationId: serveFoo
+      parameters: # the OpenAPI specifications of the endpoint parameters
+        # spec for passing a bar query parameter /foo?bar=
+        - name: bar
+          in: query
+          description:
+          required: false
+          schema:
+            type: string
+      responses: # the OpenAPI specifications for the endpoint responses
+        default:
+          description:
+          content:
+            # Content type(s)
+            text/html: {}
+```
+
+See [swagger.io/docs](https://swagger.io/docs/specification/paths-and-operations) for more information.
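For reference, a handler backing the `serveFoo` example above might look like the sketch below. oapi-codegen turns each `operationId` into a method on a generated `ServerInterface`; the `Server` receiver and the `ServeFooParams` struct shown here are illustrative assumptions, not code from this patch.

```go
package server

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// ServeFooParams stands in for the struct oapi-codegen would emit into
// types.gen.go from the query parameters of GET /foo (hypothetical name).
type ServeFooParams struct {
	Bar *string `form:"bar" json:"bar,omitempty"`
}

// Server is a placeholder implementation of the generated ServerInterface.
type Server struct{}

// ServeFoo handles GET /foo; the generated bindings in endpoint.gen.go route
// the request here based on the operationId "serveFoo".
func (*Server) ServeFoo(c *gin.Context, params ServeFooParams) {
	bar := ""
	if params.Bar != nil {
		bar = *params.Bar
	}
	c.String(http.StatusOK, "bar=%s", bar)
}
```

Because the generated `ServerInterface` lists one method per operation, a handler that is missing or has the wrong shape surfaces as a compile error rather than a runtime 404.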
+
 ## Build

 The registry index server is built into a container image, `devfile-index-base:latest`, by running the following script:

 ```sh
-bash index/server/build.sh
+bash build.sh
 ```

 You retag it with one of the two command:
@@ -31,15 +61,32 @@ podman tag devfile-index-base:latest

 Push your image into the an image repository with the following:

 ```sh
-bash index/server/push.sh
+bash push.sh
 ```

 For example, if the image repository is quay.io then use the pattern `quay.io//devfile-index-base`:

 ```sh
-bash index/server/push.sh quay.io/someuser/devfile-index-base
+bash push.sh quay.io/someuser/devfile-index-base
 ```
+### Source Generation
+
+The index server build uses the CLI tool `oapi-codegen` to generate the schema types source `pkg/server/types.gen.go` and the endpoint definitions source `pkg/server/endpoint.gen.go`. When changing the OpenAPI specification, such as when [defining endpoints](#defining-endpoints), these sources must be regenerated.
+
+The source generation can be done by manually building the index server with:
+
+```bash
+bash build.sh
+```
+or by just generating the source files with:
+
+```bash
+bash codegen.sh
+```
+
+**Important**: When committing to this repository, it is *required* to include up-to-date generated sources in your pull requests. Pull requests with out-of-date generated sources will fail the PR check.
+
 ## Testing

 Endpoint unit testing is defined under `pkg/server/endpoint_test.go` and can be performed by running the following:
diff --git a/index/server/build.sh b/index/server/build.sh
index 02d50099..22b54c4e 100755
--- a/index/server/build.sh
+++ b/index/server/build.sh
@@ -3,5 +3,8 @@
 # Build the index container for the registry
 buildfolder="$(realpath $(dirname ${BASH_SOURCE[0]}))"

+# Generate OpenAPI endpoint and type definitions
+bash ${buildfolder}/codegen.sh
+
 # Build the index server
 docker build -t devfile-index-base:latest $buildfolder
diff --git a/index/server/codegen.sh b/index/server/codegen.sh
new file mode 100644
index 00000000..be1a91ca
--- /dev/null
+++ b/index/server/codegen.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+# Base of the index server directory
+projectfolder="$(realpath $(dirname ${BASH_SOURCE[0]}))"
+
+# Generate source types from OpenAPI spec
+echo "Generating type source.."
+cd ${projectfolder} && ${GOPATH}/bin/oapi-codegen -config config/types.yaml openapi.yaml && cd - > /dev/null
+if [ $? != 0 ]
+then
+  echo "error with generating type source "
+  exit 1
+fi
+
+# Generate endpoint bindings source from OpenAPI spec
+echo "Generating endpoint bindings source.."
+cd ${projectfolder} && ${GOPATH}/bin/oapi-codegen -config config/endpoint.yaml openapi.yaml && cd - > /dev/null
+if [ $? != 0 ]
+then
+  echo "error with generating endpoint bindings source "
+  exit 1
+fi
+
+echo "Done."
diff --git a/index/server/config/endpoint.yaml b/index/server/config/endpoint.yaml
new file mode 100644
index 00000000..c1279add
--- /dev/null
+++ b/index/server/config/endpoint.yaml
@@ -0,0 +1,25 @@
+package: server
+generate:
+  gin-server: true
+  embedded-spec: true
+output: pkg/server/endpoint.gen.go
+output-options:
+  user-templates:
+    imports.tmpl: |-
+      //
+      // Copyright 2023 Red Hat, Inc.
+      //
+      // Licensed under the Apache License, Version 2.0 (the "License");
+      // you may not use this file except in compliance with the License.
+ // You may obtain a copy of the License at + // + // http://www.apache.org/licenses/LICENSE-2.0 + // + // Unless required by applicable law or agreed to in writing, software + // distributed under the License is distributed on an "AS IS" BASIS, + // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + // See the License for the specific language governing permissions and + // limitations under the License. + // + // Code generated by {{.ModuleName}} version {{.Version}}. **DO NOT EDIT** + package {{.PackageName}} diff --git a/index/server/config/types.yaml b/index/server/config/types.yaml new file mode 100644 index 00000000..9cfc1efa --- /dev/null +++ b/index/server/config/types.yaml @@ -0,0 +1,25 @@ +package: server +generate: + models: true +output: pkg/server/types.gen.go +output-options: + skip-prune: true + user-templates: + imports.tmpl: |- + // + // Copyright 2023 Red Hat, Inc. + // + // Licensed under the Apache License, Version 2.0 (the "License"); + // you may not use this file except in compliance with the License. + // You may obtain a copy of the License at + // + // http://www.apache.org/licenses/LICENSE-2.0 + // + // Unless required by applicable law or agreed to in writing, software + // distributed under the License is distributed on an "AS IS" BASIS, + // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + // See the License for the specific language governing permissions and + // limitations under the License. + // + // Code generated by {{.ModuleName}} version {{.Version}}. **DO NOT EDIT** + package {{.PackageName}} diff --git a/index/server/go.mod b/index/server/go.mod index 1071563a..25ef3a7a 100644 --- a/index/server/go.mod +++ b/index/server/go.mod @@ -3,10 +3,12 @@ module github.com/devfile/registry-support/index/server go 1.18 require ( + github.com/deepmap/oapi-codegen v1.12.4 github.com/devfile/api/v2 v2.2.0 github.com/devfile/library/v2 v2.2.1-0.20230323124903-d36e409ff94f github.com/devfile/registry-support/index/generator v0.0.0-20230215135119-cdf58b183ec4 - github.com/gin-gonic/gin v1.7.7 + github.com/getkin/kin-openapi v0.117.0 + github.com/gin-gonic/gin v1.8.1 github.com/hashicorp/go-version v1.4.0 github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 github.com/opencontainers/go-digest v1.0.0 @@ -23,6 +25,7 @@ require ( github.com/Microsoft/go-winio v0.5.2 // indirect github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 // indirect github.com/acomagu/bufpipe v1.0.3 // indirect + github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.1.2 // indirect github.com/containerd/containerd v1.6.12 // indirect @@ -47,11 +50,12 @@ require ( github.com/go-logr/logr v1.2.3 // indirect github.com/go-openapi/jsonpointer v0.19.5 // indirect github.com/go-openapi/jsonreference v0.20.0 // indirect - github.com/go-openapi/swag v0.19.14 // indirect - github.com/go-playground/locales v0.13.0 // indirect - github.com/go-playground/universal-translator v0.17.0 // indirect - github.com/go-playground/validator/v10 v10.4.1 // indirect + github.com/go-openapi/swag v0.21.1 // indirect + github.com/go-playground/locales v0.14.0 // indirect + github.com/go-playground/universal-translator v0.18.0 // indirect + 
github.com/go-playground/validator/v10 v10.11.1 // indirect github.com/gobwas/glob v0.2.3 // indirect + github.com/goccy/go-json v0.9.11 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/mock v1.6.0 // indirect github.com/golang/protobuf v1.5.2 // indirect @@ -59,20 +63,22 @@ require ( github.com/google/gnostic v0.5.7-v3refs // indirect github.com/google/go-cmp v0.5.9 // indirect github.com/google/gofuzz v1.2.0 // indirect + github.com/google/uuid v1.3.0 // indirect github.com/gorilla/mux v1.8.0 // indirect github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/imdario/mergo v0.3.12 // indirect + github.com/invopop/yaml v0.1.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect github.com/klauspost/compress v1.13.6 // indirect - github.com/leodido/go-urn v1.2.0 // indirect - github.com/mailru/easyjson v0.7.6 // indirect - github.com/mattn/go-colorable v0.1.2 // indirect - github.com/mattn/go-isatty v0.0.12 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.16 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/reflectwalk v1.0.1 // indirect @@ -80,9 +86,12 @@ require ( github.com/moby/term v0.0.0-20221205130635-1aeaba878587 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/morikuni/aec v1.0.0 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/openshift/api v0.0.0-20200930075302-db52bc4ef99f // indirect + github.com/pelletier/go-toml/v2 v2.0.5 // indirect + github.com/perimeterx/marshmallow v1.1.4 // indirect github.com/peterbourgon/diskv v2.0.1+incompatible // indirect github.com/pkg/errors v0.9.1 // indirect github.com/prometheus/client_model v0.3.0 // indirect @@ -93,7 +102,7 @@ require ( github.com/sirupsen/logrus v1.9.0 // indirect github.com/spf13/afero v1.6.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - github.com/ugorji/go/codec v1.1.7 // indirect + github.com/ugorji/go/codec v1.2.7 // indirect github.com/xanzy/ssh-agent v0.3.0 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect diff --git a/index/server/go.sum b/index/server/go.sum index a50bb222..cda2d637 100644 --- a/index/server/go.sum +++ b/index/server/go.sum @@ -101,6 +101,7 @@ github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbt 
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk= github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs= github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk= @@ -116,6 +117,8 @@ github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYU github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220418222510-f25a4f6275ed/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= +github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= +github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= @@ -139,6 +142,7 @@ github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqO github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= +github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= @@ -306,6 +310,8 @@ github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7h github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deepmap/oapi-codegen v1.12.4 h1:pPmn6qI9MuOtCz82WY2Xaw46EQjgvxednXXrP7g5Q2s= +github.com/deepmap/oapi-codegen v1.12.4/go.mod h1:3lgHGMu6myQ2vqbbTXH2H1o4eXFTGnFiDaOaKKl5yas= github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= github.com/devfile/api/v2 v2.0.0-20211021164004-dabee4e633ed/go.mod 
h1:d99eTN6QxgzihOOFyOZA+VpUyD4Q1pYRYHZ/ci9J96Q= github.com/devfile/api/v2 v2.0.0-20220117162434-6e6e6a8bc14c/go.mod h1:d99eTN6QxgzihOOFyOZA+VpUyD4Q1pYRYHZ/ci9J96Q= @@ -402,13 +408,15 @@ github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4 github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= +github.com/getkin/kin-openapi v0.117.0 h1:QT2DyGujAL09F4NrKDHJGsUoIprlIcFVHWDVDcUFE8A= +github.com/getkin/kin-openapi v0.117.0/go.mod h1:l5e9PaFUo9fyLJCPGQeXI2ML8c3P8BHOEV2VaAVf/pc= github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.7.7 h1:3DoBmSbJbZAWqXJC3SLjAPfutPJJRN1U5pALB7EeTTs= -github.com/gin-gonic/gin v1.7.7/go.mod h1:axIBovoeJpVj8S3BwE0uPMTeReE4+AfFtqpqaZ1qq1U= +github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8= +github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= @@ -459,21 +467,26 @@ github.com/go-openapi/spec v0.19.5/go.mod h1:Hm2Jr4jv8G1ciIAo+frC/Ft+rR2kQDh8JHK github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= -github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng= github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.21.1 h1:wm0rhTb5z7qpJRHBdPOMuY4QjVUMbF6/kwoYeRAOrKU= +github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= -github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= -github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= -github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= -github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= 
-github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ= +github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/goccy/go-json v0.9.11 h1:/pAaQDLHEoCq/5FFmSKBswWmK6H0e8g4159Kc/X/nqk= +github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= @@ -581,6 +594,8 @@ github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -652,6 +667,8 @@ github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/invopop/yaml v0.1.0 h1:YW3WGUoJEXYfzWBjn00zIlrw7brGVD0fUKRYDPAPhrc= +github.com/invopop/yaml v0.1.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q= github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod 
h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= @@ -668,7 +685,6 @@ github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFF github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= @@ -676,6 +692,7 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck= @@ -695,8 +712,9 @@ github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= @@ -704,8 +722,10 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= -github.com/leodido/go-urn v1.2.0 
h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= -github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/labstack/echo/v4 v4.9.1 h1:GliPYSpzGKlyOhqIbG8nmHBo3i1saKWFOgh41AN3b+Y= +github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q95whyfWQpmGZTu3gk3v2YkMi05HEzl7Tf7YEo= github.com/lucasjones/reggen v0.0.0-20200904144131-37ba4fa293bb/go.mod h1:5ELEyG+X8f+meRWHuqUOewBOhvHkl7M76pdGEansxW4= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= @@ -715,19 +735,22 @@ github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= -github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= github.com/matryer/is v1.2.0 h1:92UTHpy8CDwaJ08GqLDzhhuixiBUUD1p3AU6PHddz4A= github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= github.com/mattn/go-shellwords v1.0.6/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= @@ -773,6 +796,8 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN 
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= @@ -862,10 +887,15 @@ github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FI github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwbMiyQg= +github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= +github.com/perimeterx/marshmallow v1.1.4 h1:pZLDH9RjlLGGorbXhcaQLhfuV0pFMNfPO55FuFkxqLw= +github.com/perimeterx/marshmallow v1.1.4/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1Hc+ETb5K+23HdAMvESYE3ZJ5b5cMI= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -923,6 +953,9 @@ github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40T github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod 
h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= @@ -979,6 +1012,7 @@ github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/y github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0= github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -986,6 +1020,7 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= @@ -996,6 +1031,7 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= @@ -1005,13 +1041,15 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1 github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= -github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs= -github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= +github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= +github.com/ugorji/go/codec 
v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= @@ -1128,6 +1166,7 @@ golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8= @@ -1379,6 +1418,7 @@ golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1687,6 +1727,7 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= diff --git a/index/server/openapi.yaml b/index/server/openapi.yaml new file mode 100644 index 00000000..3135a300 --- /dev/null +++ b/index/server/openapi.yaml @@ -0,0 +1,447 @@ +openapi: 3.0.1 
+info: + title: Devfile registry REST API. + description: |- + Documentation of devfile registry REST API. + + The devfile registry REST API is used for interacting with devfile registry. In this documentation, the host is serving the public devfile registry, you can change it to your own host if you want to use private devfile registry. + version: 1.0.1 +servers: +- url: https://registry.stage.devfile.io/ +- url: https://registry.devfile.io/ +paths: + /: + get: + summary: Root endpoint of registry server. + description: |- + Sets up the handler for the root (/) endpoint on the server + If html is requested (i.e. from a web browser), the viewer is displayed, otherwise the devfile index is served. + operationId: serveRootEndpoint + responses: + default: + description: Root response. + content: + text/html: {} + application/json: + schema: + $ref: '#/components/schemas/IndexSchema' + application/yaml: + schema: + $ref: '#/components/schemas/IndexSchema' + /health: + get: + tags: + - server + summary: Get health status. + description: Return the devfile registry health status. + operationId: serveHealthCheck + responses: + 200: + $ref: '#/components/responses/healthResponse' + 404: + description: 'Page not found.' + content: {} + /index: + get: + tags: + - devfile + summary: Gets index schemas of the stack devfiles. + description: |- + Fetches the registry index file content of stack devfile type + from HTTP response + operationId: serveDevfileIndexV1 + requestBody: + description: The request body must be empty. + content: {} + parameters: + - $ref: '#/components/parameters/archParam' + - $ref: '#/components/parameters/iconParam' + responses: + 200: + $ref: '#/components/responses/indexResponse' + 404: + description: 'Page not found.' + content: {} + /index/{indexType}: + get: + tags: + - devfile + summary: Gets index schemas of the devfiles of specific type. + description: |- + Fetches the registry index file content of specific devfile type + from HTTP response + operationId: serveDevfileIndexV1WithType + requestBody: + description: The request body must be empty. + content: {} + parameters: + - name: indexType + in: path + description: The devfile type filter + required: true + schema: + type: string + x-go-name: IndexType + x-go-name: IndexType + - $ref: '#/components/parameters/archParam' + - $ref: '#/components/parameters/iconParam' + responses: + 200: + $ref: '#/components/responses/indexResponse' + 404: + description: 'Page not found.' + content: {} + /v2index: + get: + tags: + - devfile + summary: Gets V2 index schemas of the stack devfiles. + description: |- + Fetches the registry version 2 index file content of + stack devfile type from HTTP response + operationId: serveDevfileIndexV2 + requestBody: + description: The request body must be empty. + content: {} + parameters: + - $ref: '#/components/parameters/archParam' + - $ref: '#/components/parameters/iconParam' + responses: + 200: + $ref: '#/components/responses/v2IndexResponse' + 404: + description: 'Page not found.' + content: {} + /v2index/{indexType}: + get: + tags: + - devfile + summary: Gets V2 index schemas of the devfiles of specific type. + description: |- + Fetches the registry version 2 index file content + of specific devfile type from HTTP response + operationId: serveDevfileIndexV2WithType + requestBody: + description: The request body must be empty. 
+ content: {} + parameters: + - name: indexType + in: path + description: The devfile type filter + required: true + schema: + type: string + x-go-name: IndexType + x-go-name: IndexType + - $ref: '#/components/parameters/archParam' + - $ref: '#/components/parameters/iconParam' + responses: + 200: + $ref: '#/components/responses/v2IndexResponse' + 404: + description: 'Page not found.' + content: {} + /devfiles/{stack}: + get: + tags: + - devfile + summary: Get devfile by stack name. + description: Return the specific stack devfile content of devfile registry. + operationId: serveDevfile + parameters: + - name: stack + in: path + description: The stack name + required: true + schema: + type: string + x-go-name: Stack + x-go-name: Stack + requestBody: + description: The request body must be empty. + content: {} + responses: + 200: + $ref: '#/components/responses/devfileResponse' + 404: + $ref: '#/components/responses/devfileNotFoundResponse' + 500: + $ref: '#/components/responses/devfileErrorResponse' + /devfiles/{stack}/{version}: + get: + tags: + - devfile + summary: Get devfile by stack name. + description: Return the specific stack devfile content of devfile registry. + operationId: serveDevfileWithVersion + parameters: + - name: stack + in: path + description: The stack name + required: true + schema: + type: string + x-go-name: Stack + x-go-name: Stack + - name: version + in: path + description: The version of the stack + required: true + schema: + type: string + x-go-name: Version + x-go-name: Version + requestBody: + description: The request body must be empty. + content: {} + responses: + 200: + $ref: '#/components/responses/devfileResponse' + 404: + $ref: '#/components/responses/devfileNotFoundResponse' + 500: + $ref: '#/components/responses/devfileErrorResponse' + /devfiles/{stack}/starter-projects/{starterProject}: + get: + summary: Fetches starter project by stack and project name + description: |- + Fetches starter project specified in requested registry stack devfile with + version's content and provides an archive (zip) file download as the HTTP response. + operationId: serveDevfileStarterProject + parameters: + - name: stack + in: path + description: The stack name + required: true + schema: + type: string + x-go-name: Stack + x-go-name: Stack + - name: starterProject + in: path + description: The starter project name in the stack devfile + required: true + schema: + type: string + x-go-name: StarterProject + x-go-name: StarterProject + responses: + '200': + $ref: '#/components/responses/starterProjectResponse' + '404': + description: 'Page not found.' + content: {} + '500': + description: Failed to get the devfile or starter project. + $ref: '#/components/responses/devfileErrorResponse' + /devfiles/{stack}/{version}/starter-projects/{starterProject}: + get: + summary: Fetches starter project by stack name, stack version, and project name + description: |- + Fetches starter project specified in requested registry stack devfile with + version's content and provides an archive (zip) file download as the HTTP response. 
+ operationId: serveDevfileStarterProjectWithVersion + parameters: + - name: stack + in: path + description: The stack name + required: true + schema: + type: string + x-go-name: Stack + x-go-name: Stack + - name: version + in: path + description: The version of the stack + required: true + schema: + type: string + x-go-name: Version + x-go-name: Version + - name: starterProject + in: path + description: The starter project name in the stack devfile + required: true + schema: + type: string + x-go-name: StarterProject + x-go-name: StarterProject + responses: + '200': + $ref: '#/components/responses/starterProjectResponse' + '404': + description: 'Page not found.' + content: {} + '500': + description: Failed to get the devfile or starter project. + $ref: '#/components/responses/devfileErrorResponse' + +components: + schemas: + Devfile: + description: Describes the structure of a cloud-native devworkspace and development environment. + x-go-type: v1alpha2.Devfile + x-go-type-import: + path: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 + IndexSchema: + description: The index file schema + x-go-type: schema.Schema + x-go-type-import: + path: github.com/devfile/registry-support/index/generator/schema + IndexParams: + description: IndexParams defines parameters for index endpoints. + type: object + properties: + icon: + $ref: '#/components/schemas/Icon' + arch: + $ref: '#/components/schemas/Architectures' + Icon: + description: Optional devfile icon, can be a URI or a relative path in the project + type: string + Architectures: + description: Optional list of processor architectures that the devfile supports, empty list suggests that the devfile can be used on any architecture + type: array + uniqueItems: false + items: + description: Architecture describes the architecture type + type: string + enum: + - "amd64" + - "arm64" + - "ppc64le" + - "s390x" + parameters: + iconParam: + name: icon + in: query + description: The icon type filter + required: false + schema: + $ref: '#/components/schemas/Icon' + archParam: + name: arch + in: query + description: The target architecture filter + required: false + schema: + $ref: '#/components/schemas/Architectures' + responses: + devfileErrorResponse: + description: Failed to get the devfile. + content: + application/json: + schema: + type: object + properties: + error: + type: string + x-go-name: Error + status: + type: string + x-go-name: Status + application/yaml: + schema: + type: object + properties: + error: + type: string + x-go-name: Error + status: + type: string + x-go-name: Status + devfileNotFoundResponse: + description: Failed to find the devfile. + content: + application/json: + schema: + type: object + properties: + status: + type: string + x-go-name: Status + application/yaml: + schema: + type: object + properties: + status: + type: string + x-go-name: Status + devfileResponse: + description: |- + Successful operation. + + Stack devfile content. + content: + application/json: + schema: + $ref: '#/components/schemas/Devfile' + application/yaml: + schema: + $ref: '#/components/schemas/Devfile' + healthResponse: + description: |- + Successful operation. + + Health status. + content: + application/json: + schema: + type: object + properties: + message: + type: string + x-go-name: Message + required: + - message + application/yaml: + schema: + type: object + properties: + message: + type: string + x-go-name: Message + required: + - message + indexResponse: + description: |- + Successful operation. + + Index content. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/IndexSchema' + application/yaml: + schema: + $ref: '#/components/schemas/IndexSchema' + v2IndexResponse: + description: |- + Successful operation. + + V2 Index content. + content: + application/json: + schema: + x-go-type: schema.Schema + x-go-type-import: + path: github.com/devfile/registry-support/index/generator/schema + application/yaml: + schema: + x-go-type: schema.Schema + x-go-type-import: + path: github.com/devfile/registry-support/index/generator/schema + starterProjectResponse: + description: |- + Successful operation. + + File bytes to download. + content: + application/zip: + schema: + type: string + format: binary + securitySchemes: + basic: + type: http + scheme: basic diff --git a/index/server/pkg/server/endpoint.gen.go b/index/server/pkg/server/endpoint.gen.go new file mode 100644 index 00000000..f0a5de34 --- /dev/null +++ b/index/server/pkg/server/endpoint.gen.go @@ -0,0 +1,515 @@ +// +// Copyright 2023 Red Hat, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by github.com/deepmap/oapi-codegen version v1.12.4. **DO NOT EDIT** +package server // ServerInterface represents all server handlers. +import ( + "bytes" + "compress/gzip" + "encoding/base64" + "fmt" + "net/http" + "net/url" + "path" + "strings" + + "github.com/deepmap/oapi-codegen/pkg/runtime" + "github.com/getkin/kin-openapi/openapi3" + "github.com/gin-gonic/gin" +) + +type ServerInterface interface { + // Root endpoint of registry server. + // (GET /) + ServeRootEndpoint(c *gin.Context) + // Get devfile by stack name. + // (GET /devfiles/{stack}) + ServeDevfile(c *gin.Context, stack string) + // Fetches starter project by stack and project name + // (GET /devfiles/{stack}/starter-projects/{starterProject}) + ServeDevfileStarterProject(c *gin.Context, stack string, starterProject string) + // Get devfile by stack name. + // (GET /devfiles/{stack}/{version}) + ServeDevfileWithVersion(c *gin.Context, stack string, version string) + // Fetches starter project by stack name, stack version, and project name + // (GET /devfiles/{stack}/{version}/starter-projects/{starterProject}) + ServeDevfileStarterProjectWithVersion(c *gin.Context, stack string, version string, starterProject string) + // Get health status. + // (GET /health) + ServeHealthCheck(c *gin.Context) + // Gets index schemas of the stack devfiles. + // (GET /index) + ServeDevfileIndexV1(c *gin.Context, params ServeDevfileIndexV1Params) + // Gets index schemas of the devfiles of specific type. + // (GET /index/{indexType}) + ServeDevfileIndexV1WithType(c *gin.Context, indexType string, params ServeDevfileIndexV1WithTypeParams) + // Gets V2 index schemas of the stack devfiles. + // (GET /v2index) + ServeDevfileIndexV2(c *gin.Context, params ServeDevfileIndexV2Params) + // Gets V2 index schemas of the devfiles of specific type. 
+ // (GET /v2index/{indexType}) + ServeDevfileIndexV2WithType(c *gin.Context, indexType string, params ServeDevfileIndexV2WithTypeParams) +} + +// ServerInterfaceWrapper converts contexts to parameters. +type ServerInterfaceWrapper struct { + Handler ServerInterface + HandlerMiddlewares []MiddlewareFunc + ErrorHandler func(*gin.Context, error, int) +} + +type MiddlewareFunc func(c *gin.Context) + +// ServeRootEndpoint operation middleware +func (siw *ServerInterfaceWrapper) ServeRootEndpoint(c *gin.Context) { + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeRootEndpoint(c) +} + +// ServeDevfile operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfile(c *gin.Context) { + + var err error + + // ------------- Path parameter "stack" ------------- + var stack string + + err = runtime.BindStyledParameter("simple", false, "stack", c.Param("stack"), &stack) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter stack: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfile(c, stack) +} + +// ServeDevfileStarterProject operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfileStarterProject(c *gin.Context) { + + var err error + + // ------------- Path parameter "stack" ------------- + var stack string + + err = runtime.BindStyledParameter("simple", false, "stack", c.Param("stack"), &stack) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter stack: %s", err), http.StatusBadRequest) + return + } + + // ------------- Path parameter "starterProject" ------------- + var starterProject string + + err = runtime.BindStyledParameter("simple", false, "starterProject", c.Param("starterProject"), &starterProject) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter starterProject: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfileStarterProject(c, stack, starterProject) +} + +// ServeDevfileWithVersion operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfileWithVersion(c *gin.Context) { + + var err error + + // ------------- Path parameter "stack" ------------- + var stack string + + err = runtime.BindStyledParameter("simple", false, "stack", c.Param("stack"), &stack) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter stack: %s", err), http.StatusBadRequest) + return + } + + // ------------- Path parameter "version" ------------- + var version string + + err = runtime.BindStyledParameter("simple", false, "version", c.Param("version"), &version) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter version: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfileWithVersion(c, stack, version) +} + +// ServeDevfileStarterProjectWithVersion operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfileStarterProjectWithVersion(c *gin.Context) { + + var err error + + // ------------- Path parameter "stack" ------------- + var stack string + + err = runtime.BindStyledParameter("simple", false, "stack", c.Param("stack"), &stack) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter stack: %s", err), http.StatusBadRequest) + return + } + + // ------------- Path parameter 
"version" ------------- + var version string + + err = runtime.BindStyledParameter("simple", false, "version", c.Param("version"), &version) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter version: %s", err), http.StatusBadRequest) + return + } + + // ------------- Path parameter "starterProject" ------------- + var starterProject string + + err = runtime.BindStyledParameter("simple", false, "starterProject", c.Param("starterProject"), &starterProject) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter starterProject: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfileStarterProjectWithVersion(c, stack, version, starterProject) +} + +// ServeHealthCheck operation middleware +func (siw *ServerInterfaceWrapper) ServeHealthCheck(c *gin.Context) { + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeHealthCheck(c) +} + +// ServeDevfileIndexV1 operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfileIndexV1(c *gin.Context) { + + var err error + + // Parameter object where we will unmarshal all parameters from the context + var params ServeDevfileIndexV1Params + + // ------------- Optional query parameter "arch" ------------- + + err = runtime.BindQueryParameter("form", true, false, "arch", c.Request.URL.Query(), ¶ms.Arch) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter arch: %s", err), http.StatusBadRequest) + return + } + + // ------------- Optional query parameter "icon" ------------- + + err = runtime.BindQueryParameter("form", true, false, "icon", c.Request.URL.Query(), ¶ms.Icon) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter icon: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfileIndexV1(c, params) +} + +// ServeDevfileIndexV1WithType operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfileIndexV1WithType(c *gin.Context) { + + var err error + + // ------------- Path parameter "indexType" ------------- + var indexType string + + err = runtime.BindStyledParameter("simple", false, "indexType", c.Param("indexType"), &indexType) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter indexType: %s", err), http.StatusBadRequest) + return + } + + // Parameter object where we will unmarshal all parameters from the context + var params ServeDevfileIndexV1WithTypeParams + + // ------------- Optional query parameter "arch" ------------- + + err = runtime.BindQueryParameter("form", true, false, "arch", c.Request.URL.Query(), ¶ms.Arch) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter arch: %s", err), http.StatusBadRequest) + return + } + + // ------------- Optional query parameter "icon" ------------- + + err = runtime.BindQueryParameter("form", true, false, "icon", c.Request.URL.Query(), ¶ms.Icon) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter icon: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfileIndexV1WithType(c, indexType, params) +} + +// ServeDevfileIndexV2 operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfileIndexV2(c *gin.Context) { + + var err error + + // Parameter object where we will 
unmarshal all parameters from the context + var params ServeDevfileIndexV2Params + + // ------------- Optional query parameter "arch" ------------- + + err = runtime.BindQueryParameter("form", true, false, "arch", c.Request.URL.Query(), ¶ms.Arch) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter arch: %s", err), http.StatusBadRequest) + return + } + + // ------------- Optional query parameter "icon" ------------- + + err = runtime.BindQueryParameter("form", true, false, "icon", c.Request.URL.Query(), ¶ms.Icon) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter icon: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfileIndexV2(c, params) +} + +// ServeDevfileIndexV2WithType operation middleware +func (siw *ServerInterfaceWrapper) ServeDevfileIndexV2WithType(c *gin.Context) { + + var err error + + // ------------- Path parameter "indexType" ------------- + var indexType string + + err = runtime.BindStyledParameter("simple", false, "indexType", c.Param("indexType"), &indexType) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter indexType: %s", err), http.StatusBadRequest) + return + } + + // Parameter object where we will unmarshal all parameters from the context + var params ServeDevfileIndexV2WithTypeParams + + // ------------- Optional query parameter "arch" ------------- + + err = runtime.BindQueryParameter("form", true, false, "arch", c.Request.URL.Query(), ¶ms.Arch) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter arch: %s", err), http.StatusBadRequest) + return + } + + // ------------- Optional query parameter "icon" ------------- + + err = runtime.BindQueryParameter("form", true, false, "icon", c.Request.URL.Query(), ¶ms.Icon) + if err != nil { + siw.ErrorHandler(c, fmt.Errorf("Invalid format for parameter icon: %s", err), http.StatusBadRequest) + return + } + + for _, middleware := range siw.HandlerMiddlewares { + middleware(c) + } + + siw.Handler.ServeDevfileIndexV2WithType(c, indexType, params) +} + +// GinServerOptions provides options for the Gin server. +type GinServerOptions struct { + BaseURL string + Middlewares []MiddlewareFunc + ErrorHandler func(*gin.Context, error, int) +} + +// RegisterHandlers creates http.Handler with routing matching OpenAPI spec. 
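For orientation, a minimal usage sketch follows (not part of the generated file, written as if in the same Go package): it wires a ServerInterface implementation into Gin through the options type above, assuming the Server type that this patch adds later in endpoint.go. The ErrorHandler shown is the hook the wrapper methods above call when path or query parameter binding fails.

    // Sketch only: register a ServerInterface implementation with custom options.
    router := gin.Default()
    router = RegisterHandlersWithOptions(router, &Server{}, GinServerOptions{
        Middlewares: []MiddlewareFunc{
            func(c *gin.Context) {
                // per-request middleware, e.g. logging or OpenAPI request validation
            },
        },
        ErrorHandler: func(c *gin.Context, err error, statusCode int) {
            // Invoked by the wrappers above when parameter binding fails (400 Bad Request).
            c.JSON(statusCode, gin.H{"error": err.Error()})
        },
    })
    _ = router.Run(":8080")
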
+func RegisterHandlers(router *gin.Engine, si ServerInterface) *gin.Engine { + return RegisterHandlersWithOptions(router, si, GinServerOptions{}) +} + +// RegisterHandlersWithOptions creates http.Handler with additional options +func RegisterHandlersWithOptions(router *gin.Engine, si ServerInterface, options GinServerOptions) *gin.Engine { + + errorHandler := options.ErrorHandler + + if errorHandler == nil { + errorHandler = func(c *gin.Context, err error, statusCode int) { + c.JSON(statusCode, gin.H{"msg": err.Error()}) + } + } + + wrapper := ServerInterfaceWrapper{ + Handler: si, + HandlerMiddlewares: options.Middlewares, + ErrorHandler: errorHandler, + } + + router.GET(options.BaseURL+"/", wrapper.ServeRootEndpoint) + + router.GET(options.BaseURL+"/devfiles/:stack", wrapper.ServeDevfile) + + router.GET(options.BaseURL+"/devfiles/:stack/starter-projects/:starterProject", wrapper.ServeDevfileStarterProject) + + router.GET(options.BaseURL+"/devfiles/:stack/:version", wrapper.ServeDevfileWithVersion) + + router.GET(options.BaseURL+"/devfiles/:stack/:version/starter-projects/:starterProject", wrapper.ServeDevfileStarterProjectWithVersion) + + router.GET(options.BaseURL+"/health", wrapper.ServeHealthCheck) + + router.GET(options.BaseURL+"/index", wrapper.ServeDevfileIndexV1) + + router.GET(options.BaseURL+"/index/:indexType", wrapper.ServeDevfileIndexV1WithType) + + router.GET(options.BaseURL+"/v2index", wrapper.ServeDevfileIndexV2) + + router.GET(options.BaseURL+"/v2index/:indexType", wrapper.ServeDevfileIndexV2WithType) + + return router +} + +// Base64 encoded, gzipped, json marshaled Swagger object +var swaggerSpec = []string{ + + "H4sIAAAAAAAC/+xaX3PbuBH/Khi0M01maNHRpTdTvV17SU8PbT22mz6c/QCRSxEXEuABSyk6j757ZwGQ", + "Ik3SlhXnLjfnl4QClsBvd3/7h4DveKLLSitQaPnijlfCiBIQjPslTJJf0Aj9SMEmRlYoteILfp0DQ2HW", + "gIykJEKCtQGWyQLB8IhLkvq5BrPjEVeiBL5w6/GI2ySHUtCafzaQ8QX/U3xAEftZG3/XWdby/T7iMtHq", + "ATg0zXBXPQKCxI4GsSThPW1uwFZaWbB+800mC3hnjDaXYYLGE60QFDrbVVUhE0H44p8sgbzr7FkZXYFB", + "6ZcDWoceCD1fcItGqjWP+KeztT4LuN1mfB9xiwJr+5j4lZci6EFMr36CBN1IF9xOlMVXBG4f3XPteyEL", + "SBlqRmTDHFiw/ow7Wff8b43vda3SZ3DGr2zeL2mwTKp0ymInWeqhUPner3uEAY5bZaDXVZ0kYG1WF4wM", + "6Faf3agbdYUi+djoyIIyTtccRIH5M5CiBGvFGh5z07+CmE8YP9fSQMoXP7av334uW74cjuPN/YMzKvPE", + "dWaWKoVPz06oJa165UU/j1T9lY7X1L3XI5RFYRDMhdFktiNU/kVWfZyZNqVAvuArqYSrS31HPgXfe+L7", + "aodgKdpTvVWFFqkDupkvT3aKY1KDyo3OgvGiw9yZLCtt0DcNmPMFX0vM69Us0WUcYjE2sJYWze7M1hVJ", + "x44p8RoUqaFN8JBX+mH//iagjnfFhzm7z5Z902S4yO23M4MG5j/uQRSskBaZzlhlNO2kTa+/sgxz0auC", + "LGhhIwZlhTu/gK3Xa7A4Ip4IxVbAagsp04oJtettQE0TQjmCsKsA81Mrhwf6HaDzUcRB1SWlHFGm377l", + "ERemdP9XVfLt24Ik7Dd/O//USUVNALQDwhixo99NURhg+r4Hw6KpPQadMcGSQtfpmRIoN079rTYfbSUS", + "YEKlNACFrkpQyEBtpNGqdH6LelTbvBFFlYv5rMHwVLaJSsabeVx9XNOjjVsUNm7WdlRxneY0LRr3Ufca", + "NU4U7L+XS0YUYQYKryghYVI5g1Q+TfERE3dT4ng37djsGTaIs183MVhIaiNx5/bywbMSViZtgiA8fqTV", + "NEes/NtSZXqEOTqpyd8ugokvjYEbZOzy3dU1++5i6cL7uhNBAwkmrY+nTBsmFYIRCUq1ZluJ+eC1GVuS", + "d6RlaRdD5DyWa4u0nAWzoRWcF+tVIZPBOhHb6doxIcmFWgOTSFVgp2vD9FaFpTIntRXKTdaWSCE3Aofq", + "EPNRIoVZE3AjxuAR34Cx3ohvZuezN8QmXYESleQL/o0bipzrnadi+mcNOHTBFaBldeX1FiotwDgL0m+j", + "NbJX8WsGKq20VEi5ysU4mA2YG7XMWI5lQbaiBgcsQspeyRnMWGZ0yQTbwoqtjN5aMK+9cTcStmDolVTa", + "qhA7SCOmMQezlRZ6WdKzPzgCUlK7zffL1IE3G7jUGt8FgHzwiZiJusCvvhuKOMInjMmYfHE3rHikI2s0", + "C1WtLktqXcLkwUXZgTDeT16+iXob31lq1feTlLgErE1wdAWJzGTC7Fh3PxaxE046JO7u4caPYznPb+V6", + "6HB24DJYe3Tg5nm3pUZTQ/cs4ZFvuOQjRcvI4K1fFCz+Xae7PmkGLiGoQZqtdLpjZU1P4BsA1/71qDg/", + "P5/iRCsX3/8s3Ef87fnbo98bfIDvI/7XJ+zbP0rps+yfgK2zV7uOl1zKEmtyZ/NZy29HGReHxv0slEQ/", + 
"0Wnlpzn5HjDJwbIg3xTVhqCQUrU95KBDBPR460rBjQq58y+2JTL1IpXRG5mCZUL5XmoD7NUvsnrt62/T", + "2DPh+5wfrq8vOiH5EOuvekp+3TEQTcDpWZ1eadqbnoUn0fb1Pxl2d5n9w7O3p8TfxJflIQz7trkQa2BK", + "I8so6mbPGW5ThG9DL1C29cd4ko/vAtl/k3T/P4n5h9Co/P5YHyxHardEHwe2aXU8DVpjo/3E8Eth+sKF", + "qY2SlxJFmF/i9nni9qWY/k6KKakdhedAimiiwvrbjGOq6eCoIu+f2Y/Goz/X/0cOIWKeavR7ly1HG3uQ", + "Ugdgm1TqPypDJnWf6I+mRHeY0Fihc6bVaSz6WdBdIN8od4jQy2IPJjH3Tf3hzTBljZnsIBIfbtgpYh8R", + "Ptx/fwVluX/nc7KzbXBKOJzoZc7GJ3a6nPpjwzv33/Wugv2zEKLpRZ+FE1TRrv2Z+KPlrLdh/68Y+qm4", + "VfjkLLxsV9hPT7zw90T+Nszt0YncMU3lzfyEjNY0HPMJKt+okex2Eo/nf6Dcdv/y9DPY8WH+eQkusOL0", + "FPcgQ27UZLo7jSQvye6PSuinZjx3r0ftnKdHbYpwa2cXcXs9OLMo1jBr/nxJ6tiZfkK4J3a7/38AAAD/", + "//s8zlfcKAAA", +} + +// GetSwagger returns the content of the embedded swagger specification file +// or error if failed to decode +func decodeSpec() ([]byte, error) { + zipped, err := base64.StdEncoding.DecodeString(strings.Join(swaggerSpec, "")) + if err != nil { + return nil, fmt.Errorf("error base64 decoding spec: %s", err) + } + zr, err := gzip.NewReader(bytes.NewReader(zipped)) + if err != nil { + return nil, fmt.Errorf("error decompressing spec: %s", err) + } + var buf bytes.Buffer + _, err = buf.ReadFrom(zr) + if err != nil { + return nil, fmt.Errorf("error decompressing spec: %s", err) + } + + return buf.Bytes(), nil +} + +var rawSpec = decodeSpecCached() + +// a naive cached of a decoded swagger spec +func decodeSpecCached() func() ([]byte, error) { + data, err := decodeSpec() + return func() ([]byte, error) { + return data, err + } +} + +// Constructs a synthetic filesystem for resolving external references when loading openapi specifications. +func PathToRawSpec(pathToFile string) map[string]func() ([]byte, error) { + var res = make(map[string]func() ([]byte, error)) + if len(pathToFile) > 0 { + res[pathToFile] = rawSpec + } + + return res +} + +// GetSwagger returns the Swagger specification corresponding to the generated code +// in this file. The external references of Swagger specification are resolved. +// The logic of resolving external references is tightly connected to "import-mapping" feature. +// Externally referenced files must be embedded in the corresponding golang packages. +// Urls can be supported but this task was out of the scope. +func GetSwagger() (swagger *openapi3.T, err error) { + var resolvePath = PathToRawSpec("") + + loader := openapi3.NewLoader() + loader.IsExternalRefsAllowed = true + loader.ReadFromURIFunc = func(loader *openapi3.Loader, url *url.URL) ([]byte, error) { + var pathToFile = url.String() + pathToFile = path.Clean(pathToFile) + getSpec, ok := resolvePath[pathToFile] + if !ok { + err1 := fmt.Errorf("path not found: %s", pathToFile) + return nil, err1 + } + return getSpec() + } + var specData []byte + specData, err = rawSpec() + if err != nil { + return + } + swagger, err = loader.LoadFromData(specData) + if err != nil { + return + } + return +} diff --git a/index/server/pkg/server/endpoint.go b/index/server/pkg/server/endpoint.go index b4f426ea..649e8623 100644 --- a/index/server/pkg/server/endpoint.go +++ b/index/server/pkg/server/endpoint.go @@ -41,28 +41,31 @@ import ( "gopkg.in/segmentio/analytics-go.v3" ) -// serveRootEndpoint sets up the handler for the root (/) endpoint on the server +type Server struct { +} + +// ServeRootEndpoint sets up the handler for the root (/) endpoint on the server // If html is requested (i.e. from a web browser), the viewer is displayed, otherwise the devfile index is served. 
-func serveRootEndpoint(c *gin.Context) { +func (*Server) ServeRootEndpoint(c *gin.Context) { // Determine if text/html was requested by the client acceptHeader := c.Request.Header.Values("Accept") if util.IsHtmlRequested(acceptHeader) { c.Redirect(http.StatusFound, "/viewer") } else { - serveDevfileIndex(c, true) + c.Redirect(http.StatusFound, "/index") } } -func serveDevfileIndexV1(c *gin.Context) { - serveDevfileIndex(c, true) +func (*Server) ServeDevfileIndexV1(c *gin.Context, params ServeDevfileIndexV1Params) { + ServeDevfileIndex(c, true, IndexParams(params)) } -func serveDevfileIndexV2(c *gin.Context) { - serveDevfileIndex(c, false) +func (*Server) ServeDevfileIndexV2(c *gin.Context, params ServeDevfileIndexV2Params) { + ServeDevfileIndex(c, false, IndexParams(params)) } -// serveDevfileIndex serves the index.json file located in the container at `serveDevfileIndex` -func serveDevfileIndex(c *gin.Context, wantV1Index bool) { +// ServeDevfileIndex serves the index.json file located in the container at `ServeDevfileIndex` +func ServeDevfileIndex(c *gin.Context, wantV1Index bool, params IndexParams) { // Start the counter for the request var status string timer := prometheus.NewTimer(prometheus.ObserverFunc(func(v float64) { @@ -72,35 +75,30 @@ func serveDevfileIndex(c *gin.Context, wantV1Index bool) { timer.ObserveDuration() }() - // append the devfile type, for endpoint /index without type - c.Params = append(c.Params, gin.Param{Key: "type", Value: string(indexSchema.StackDevfileType)}) - // Serve the index.json file - buildIndexAPIResponse(c, wantV1Index) + buildIndexAPIResponse(c, string(indexSchema.StackDevfileType), wantV1Index, params) } -func serveDevfileIndexV1WithType(c *gin.Context) { +func (*Server) ServeDevfileIndexV1WithType(c *gin.Context, indexType string, params ServeDevfileIndexV1WithTypeParams) { // Serve the index with type - buildIndexAPIResponse(c, true) + buildIndexAPIResponse(c, indexType, true, IndexParams(params)) } -func serveDevfileIndexV2WithType(c *gin.Context) { +func (*Server) ServeDevfileIndexV2WithType(c *gin.Context, indexType string, params ServeDevfileIndexV2WithTypeParams) { // Serve the index with type - buildIndexAPIResponse(c, false) + buildIndexAPIResponse(c, indexType, false, IndexParams(params)) } -// serveHealthCheck serves endpoint `/health` for registry health check -func serveHealthCheck(c *gin.Context) { - c.JSON(http.StatusOK, gin.H{ - "message": "the server is up and running", +// ServeHealthCheck serves endpoint `/health` for registry health check +func (*Server) ServeHealthCheck(c *gin.Context) { + c.JSON(http.StatusOK, HealthResponse{ + Message: "the server is up and running", }) } -func serveDevfileWithVersion(c *gin.Context) { - name := c.Param("name") - version := c.Param("version") +func (*Server) ServeDevfileWithVersion(c *gin.Context, name string, version string) { bytes, devfileIndex := fetchDevfile(c, name, version) if len(bytes) != 0 { @@ -128,27 +126,22 @@ func serveDevfileWithVersion(c *gin.Context) { } } -// serveDevfile returns the devfile content -func serveDevfile(c *gin.Context) { +// ServeDevfile returns the devfile content +func (s *Server) ServeDevfile(c *gin.Context, name string) { // append the stack version, for endpoint /devfiles/name without version - c.Params = append(c.Params, gin.Param{Key: "version", Value: "default"}) - serveDevfileWithVersion(c) + s.ServeDevfileWithVersion(c, name, "default") } -// serveDevfileStarterProject returns the starter project content for the devfile using default version -func 
serveDevfileStarterProject(c *gin.Context) { - c.Params = append(c.Params, gin.Param{Key: "version", Value: "default"}) - serveDevfileStarterProjectWithVersion(c) +// ServeDevfileStarterProject returns the starter project content for the devfile using default version +func (s *Server) ServeDevfileStarterProject(c *gin.Context, name string, starterProject string) { + s.ServeDevfileStarterProjectWithVersion(c, name, "default", starterProject) } -// serveDevfileStarterProject returns the starter project content for the devfile using specified version -func serveDevfileStarterProjectWithVersion(c *gin.Context) { - devfileName := c.Param("name") - version := c.Param("version") - starterProjectName := c.Param("starterProjectName") - downloadTmpLoc := path.Join("/tmp", starterProjectName) - stackLoc := path.Join(stacksPath, devfileName) - devfileBytes, devfileIndex := fetchDevfile(c, devfileName, version) +// ServeDevfileStarterProject returns the starter project content for the devfile using specified version +func (*Server) ServeDevfileStarterProjectWithVersion(c *gin.Context, name string, version string, starterProject string) { + downloadTmpLoc := path.Join("/tmp", starterProject) + stackLoc := path.Join(stacksPath, name) + devfileBytes, devfileIndex := fetchDevfile(c, name, version) if len(devfileIndex.Versions) > 1 { versionMap, err := util.MakeVersionMap(devfileIndex) @@ -170,7 +163,7 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { } else { content, err := parser.ParseFromData(devfileBytes) filterOptions := common.DevfileOptions{ - FilterByName: starterProjectName, + FilterByName: starterProject, } var starterProjects []v1alpha2.StarterProject var downloadBytes []byte @@ -179,7 +172,7 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { log.Print(err.Error()) c.JSON(http.StatusInternalServerError, gin.H{ "error": err.Error(), - "status": fmt.Sprintf("failed to parse the devfile of %s", devfileName), + "status": fmt.Sprintf("failed to parse the devfile of %s", name), }) return } @@ -189,28 +182,28 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { log.Print(err.Error()) c.JSON(http.StatusInternalServerError, gin.H{ "error": err.Error(), - "status": fmt.Sprintf("problem in reading starter project %s of devfile %s", starterProjectName, devfileName), + "status": fmt.Sprintf("problem in reading starter project %s of devfile %s", starterProject, name), }) return } else if len(starterProjects) == 0 { c.JSON(http.StatusNotFound, gin.H{ - "status": fmt.Sprintf("the starter project named %s does not exist in the %s devfile", starterProjectName, devfileName), + "status": fmt.Sprintf("the starter project named %s does not exist in the %s devfile", starterProject, name), }) return } - if starterProject := starterProjects[0]; starterProject.Git != nil { + if selStarterProject := starterProjects[0]; selStarterProject.Git != nil { gitScheme := indexSchema.Git{ - Remotes: starterProject.Git.Remotes, + Remotes: selStarterProject.Git.Remotes, RemoteName: "origin", - SubDir: starterProject.SubDir, + SubDir: selStarterProject.SubDir, } - if starterProject.Git.CheckoutFrom != nil { - if starterProject.Git.CheckoutFrom.Remote != "" { - gitScheme.RemoteName = starterProject.Git.CheckoutFrom.Remote + if selStarterProject.Git.CheckoutFrom != nil { + if selStarterProject.Git.CheckoutFrom.Remote != "" { + gitScheme.RemoteName = selStarterProject.Git.CheckoutFrom.Remote } - gitScheme.Revision = starterProject.Git.CheckoutFrom.Revision + gitScheme.Revision = 
selStarterProject.Git.CheckoutFrom.Revision } gitScheme.Url = gitScheme.Remotes[gitScheme.RemoteName] @@ -220,18 +213,18 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { c.JSON(http.StatusInternalServerError, gin.H{ "error": err.Error(), "status": fmt.Sprintf("Problem with downloading starter project %s from location: %s", - starterProjectName, gitScheme.Url), + starterProject, gitScheme.Url), }) return } - } else if starterProject.Zip != nil { - if _, err = url.ParseRequestURI(starterProject.Zip.Location); err != nil { - localLoc := path.Join(stackLoc, starterProject.Zip.Location) + } else if selStarterProject.Zip != nil { + if _, err = url.ParseRequestURI(selStarterProject.Zip.Location); err != nil { + localLoc := path.Join(stackLoc, selStarterProject.Zip.Location) log.Printf("zip location is not a valid http url: %v\nTrying local path %s..", err, localLoc) // If subdirectory is specified for starter project download then extract subdirectory // and create new archive for download. - if starterProject.SubDir != "" { + if selStarterProject.SubDir != "" { downloadFilePath := fmt.Sprintf("%s.zip", downloadTmpLoc) if _, err = os.Stat(downloadTmpLoc); os.IsExist(err) { @@ -242,20 +235,20 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { "error": err.Error(), "status": fmt.Sprintf("Problem removing existing temporary download directory '%s' for starter project %s", downloadTmpLoc, - starterProjectName), + starterProject), }) return } } - _, err = dfutil.Unzip(localLoc, downloadTmpLoc, starterProject.SubDir) + _, err = dfutil.Unzip(localLoc, downloadTmpLoc, selStarterProject.SubDir) if err != nil { log.Print(err.Error()) c.JSON(http.StatusInternalServerError, gin.H{ "error": err.Error(), "status": fmt.Sprintf("Problem with reading subDir '%s' of starter project %s at %s", - starterProject.SubDir, - starterProjectName, + selStarterProject.SubDir, + starterProject, localLoc), }) return @@ -267,8 +260,8 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { c.JSON(http.StatusInternalServerError, gin.H{ "error": err.Error(), "status": fmt.Sprintf("Problem with archiving subDir '%s' of starter project %s at %s", - starterProject.SubDir, - starterProjectName, + selStarterProject.SubDir, + starterProject, downloadFilePath), }) return @@ -282,25 +275,25 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { log.Print(err.Error()) c.JSON(http.StatusInternalServerError, gin.H{ "error": err.Error(), - "status": fmt.Sprintf("Problem with reading starter project %s at %s", starterProjectName, + "status": fmt.Sprintf("Problem with reading starter project %s at %s", starterProject, localLoc), }) return } } else { - downloadBytes, err = libutil.DownloadStackFromZipUrl(starterProject.Zip.Location, starterProject.SubDir, downloadTmpLoc) + downloadBytes, err = libutil.DownloadStackFromZipUrl(selStarterProject.Zip.Location, selStarterProject.SubDir, downloadTmpLoc) if err != nil { log.Print(err.Error()) c.JSON(http.StatusInternalServerError, gin.H{ "error": err.Error(), - "status": fmt.Sprintf("Problem with downloading starter project %s", starterProjectName), + "status": fmt.Sprintf("Problem with downloading starter project %s", starterProject), }) return } } } else { c.JSON(http.StatusBadRequest, gin.H{ - "status": fmt.Sprintf("Starter project %s has no source to download from", starterProjectName), + "status": fmt.Sprintf("Starter project %s has no source to download from", starterProject), }) return } @@ -317,7 +310,7 @@ func 
serveDevfileStarterProjectWithVersion(c *gin.Context) { Context: util.SetContext(c), Properties: analytics.NewProperties(). Set("devfile", devfileName). - Set("starterProject", starterProjectName). + Set("starterProject", starterProject). Set("client", client), }) if err != nil { @@ -325,18 +318,18 @@ func serveDevfileStarterProjectWithVersion(c *gin.Context) { } } - c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.zip\"", starterProjectName)) + c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.zip\"", starterProject)) c.Data(http.StatusAccepted, starterProjectMediaType, downloadBytes) } } -// serveHeadlessUI handles registry viewer proxy requests in headless mode -func serveHeadlessUI(c *gin.Context) { - c.String(http.StatusBadRequest, "registry viewer is not available in headless mode") -} +// ServeUI handles registry viewer proxy requests +func ServeUI(c *gin.Context) { + if headless { + c.String(http.StatusBadRequest, "registry viewer is not available in headless mode") + return + } -// serveUI handles registry viewer proxy requests in headed mode -func serveUI(c *gin.Context) { remote, err := url.Parse(scheme + "://" + viewerService + "/viewer/") if err != nil { panic(err) @@ -363,11 +356,18 @@ func serveUI(c *gin.Context) { } // buildIndexAPIResponse builds the response of the REST API of getting the devfile index -func buildIndexAPIResponse(c *gin.Context, wantV1Index bool) { +func buildIndexAPIResponse(c *gin.Context, indexType string, wantV1Index bool, params IndexParams) { - indexType := c.Param("type") - iconType := c.Query("icon") - archs := c.QueryArray("arch") + iconType := "" + archs := []string{} + + if params.Icon != nil { + iconType = *params.Icon + } + + if params.Arch != nil { + archs = append(archs, *params.Arch...) + } var bytes []byte var responseIndexPath, responseBase64IndexPath string @@ -631,3 +631,54 @@ func fetchDevfile(c *gin.Context, name string, version string) ([]byte, indexSch }) return []byte{}, indexSchema.Schema{} } + +func ServeOciProxy(c *gin.Context) { + proxyPath := c.Param("proxyPath") + remote, err := url.Parse(scheme + "://" + registryService + "/v2") + if err != nil { + panic(err) + } + + proxy := httputil.NewSingleHostReverseProxy(remote) + + // Set up the request to the proxy + // Track event for telemetry for GET requests only + if enableTelemetry && c.Request.Method == http.MethodGet && proxyPath != "" { + var name string + var resource string + parts := strings.Split(proxyPath, "/") + // Check proxyPath (e.g. /devfile-catalog/java-quarkus/blobs/sha256:d913cab108c3bc1bd06ce61f1e0cdb6eea2222a7884378f7e656fa26249990b9) + if len(parts) == 5 { + name = parts[2] + resource = parts[3] + } + + //Ignore events from the registry-viewer and DevConsole since those are tracked on the client side. Ignore indirect calls from clients. + if resource == "blobs" && !util.IsWebClient(c) && !util.IsIndirectCall(c) { + user := util.GetUser(c) + client := util.GetClient(c) + + err := util.TrackEvent(analytics.Track{ + Event: eventTrackMap["download"], + UserId: user, + Context: util.SetContext(c), + Properties: analytics.NewProperties(). + Set("name", name). + Set("registry", registry). 
+ Set("client", client), + }) + if err != nil { + log.Println(err.Error()) + } + } + } + + proxy.Director = func(req *http.Request) { + req.Header.Add("X-Forwarded-Host", req.Host) + req.Header.Add("X-Origin-Host", remote.Host) + req.URL.Scheme = remote.Scheme + req.URL.Host = remote.Host + } + + proxy.ServeHTTP(c.Writer, c.Request) +} diff --git a/index/server/pkg/server/endpoint_test.go b/index/server/pkg/server/endpoint_test.go index 89c8c1ce..2887d9ce 100644 --- a/index/server/pkg/server/endpoint_test.go +++ b/index/server/pkg/server/endpoint_test.go @@ -368,10 +368,11 @@ func TestServeHealthCheck(t *testing.T) { setupVars() + server := &Server{} w := httptest.NewRecorder() c, _ := gin.CreateTestContext(w) - serveHealthCheck(c) + server.ServeHealthCheck(c) wantStatusCode := http.StatusOK if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, wantStatusCode) { @@ -416,10 +417,11 @@ func TestServeDevfileIndexV1(t *testing.T) { gin.SetMode(gin.TestMode) + server := &Server{} w := httptest.NewRecorder() c, _ := gin.CreateTestContext(w) - serveDevfileIndexV1(c) + server.ServeDevfileIndexV1(c, ServeDevfileIndexV1Params{}) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, wantStatusCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, wantStatusCode) @@ -464,6 +466,7 @@ func TestServeDevfileIndexV1WithType(t *testing.T) { wantCode: http.StatusNotFound, }, } + server := &Server{} for _, test := range tests { t.Run(test.name, func(tt *testing.T) { @@ -474,7 +477,9 @@ func TestServeDevfileIndexV1WithType(t *testing.T) { c.Params = append(c.Params, test.params...) - serveDevfileIndexV1WithType(c) + indexType, _ := c.Params.Get("type") + + server.ServeDevfileIndexV1WithType(c, indexType, ServeDevfileIndexV1WithTypeParams{}) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, test.wantCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, test.wantCode) @@ -492,10 +497,11 @@ func TestServeDevfileIndexV2(t *testing.T) { gin.SetMode(gin.TestMode) + server := &Server{} w := httptest.NewRecorder() c, _ := gin.CreateTestContext(w) - serveDevfileIndexV2(c) + server.ServeDevfileIndexV2(c, ServeDevfileIndexV2Params{}) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, wantStatusCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, wantStatusCode) @@ -585,6 +591,7 @@ func TestServeDevfileIndexV2WithType(t *testing.T) { wantCode: http.StatusNotFound, }, } + server := &Server{} for _, test := range tests { t.Run(test.name, func(tt *testing.T) { @@ -597,7 +604,9 @@ func TestServeDevfileIndexV2WithType(t *testing.T) { c.Params = append(c.Params, test.params...) c.Request.URL.RawQuery = test.query.Encode() - serveDevfileIndexV2WithType(c) + indexType, _ := c.Params.Get("type") + + server.ServeDevfileIndexV2WithType(c, indexType, ServeDevfileIndexV2WithTypeParams{}) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, test.wantCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, test.wantCode) @@ -649,6 +658,7 @@ func TestServeDevfile(t *testing.T) { } defer closeServer() setupVars() + server := &Server{} for _, test := range tests { t.Run(test.name, func(tt *testing.T) { @@ -659,7 +669,8 @@ func TestServeDevfile(t *testing.T) { c.Params = append(c.Params, test.params...) 
- serveDevfile(c) + name, _ := c.Params.Get("name") + server.ServeDevfile(c, name) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, test.wantCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, test.wantCode) @@ -790,6 +801,7 @@ func TestServeDevfileWithVersion(t *testing.T) { } defer closeServer() setupVars() + server := &Server{} for _, test := range tests { t.Run(test.name, func(tt *testing.T) { @@ -802,7 +814,9 @@ func TestServeDevfileWithVersion(t *testing.T) { c.Params = append(c.Params, test.params...) c.Request.URL.RawQuery = test.query.Encode() - serveDevfileWithVersion(c) + name, _ := c.Params.Get("name") + version, _ := c.Params.Get("version") + server.ServeDevfileWithVersion(c, name, version) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, test.wantCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, test.wantCode) @@ -897,6 +911,7 @@ func TestServeDevfileStarterProject(t *testing.T) { } defer closeServer() setupVars() + server := &Server{} for _, test := range tests { t.Run(test.name, func(tt *testing.T) { @@ -907,7 +922,10 @@ func TestServeDevfileStarterProject(t *testing.T) { c.Params = append(c.Params, test.params...) - serveDevfileStarterProject(c) + name, _ := c.Params.Get("name") + starterProject, _ := c.Params.Get("starterProjectName") + + server.ServeDevfileStarterProject(c, name, starterProject) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, test.wantCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, test.wantCode) @@ -999,6 +1017,7 @@ func TestServeDevfileStarterProjectWithVersion(t *testing.T) { } defer closeServer() setupVars() + server := &Server{} for _, test := range tests { t.Run(test.name, func(tt *testing.T) { @@ -1009,7 +1028,11 @@ func TestServeDevfileStarterProjectWithVersion(t *testing.T) { c.Params = append(c.Params, test.params...) 
- serveDevfileStarterProjectWithVersion(c) + name, _ := c.Params.Get("name") + version, _ := c.Params.Get("version") + starterProject, _ := c.Params.Get("starterProjectName") + + server.ServeDevfileStarterProjectWithVersion(c, name, version, starterProject) if gotStatusCode := w.Code; !reflect.DeepEqual(gotStatusCode, test.wantCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, test.wantCode) @@ -1106,9 +1129,8 @@ func TestOCIServerProxy(t *testing.T) { if err != nil { t.Fatalf("Did not expect error: %v", err) } - c.Params = append(c.Params, gin.Param{Key: "proxyPath", Value: test.url}) - ociServerProxy(c) + ServeOciProxy(c) // Force writes response headers to combat a response recording issue c.Writer.WriteHeaderNow() @@ -1132,7 +1154,8 @@ func TestServeHeadlessUI(t *testing.T) { w := httptest.NewRecorder() c, _ := gin.CreateTestContext(w) - serveHeadlessUI(c) + headless = true + ServeUI(c) if gotStatusCode, gotBody := w.Code, w.Body.String(); !reflect.DeepEqual(gotStatusCode, wantCode) { t.Errorf("Did not get expected status code, Got: %v, Expected: %v", gotStatusCode, wantCode) diff --git a/index/server/pkg/server/index.go b/index/server/pkg/server/index.go index d8e68a7f..e9bb5a3f 100644 --- a/index/server/pkg/server/index.go +++ b/index/server/pkg/server/index.go @@ -20,13 +20,8 @@ import ( "io/ioutil" "log" "net/http" - "net/http/httputil" - "net/url" - "strings" "time" - "github.com/devfile/registry-support/index/server/pkg/util" - "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" @@ -34,9 +29,9 @@ import ( indexLibrary "github.com/devfile/registry-support/index/generator/library" indexSchema "github.com/devfile/registry-support/index/generator/schema" + oapiMiddleware "github.com/deepmap/oapi-codegen/pkg/gin-middleware" _ "github.com/devfile/registry-support/index/server/docs" "github.com/gin-gonic/gin" - "gopkg.in/segmentio/analytics-go.v3" "k8s.io/apimachinery/pkg/util/wait" ) @@ -151,93 +146,40 @@ func ServeRegistry() { log.Println("Telemetry is not enabled") } + // Get OpenAPI spec + swagger, err := GetSwagger() + if err != nil { + log.Fatalf("Error loading OpenAPI spec: %v", err) + } + + swagger.Servers = nil + + // Create server context + server := &Server{} + // Start the server and serve requests and index.json router := gin.Default() - // Registry REST APIs - router.GET("/", serveRootEndpoint) - router.GET("/index", serveDevfileIndexV1) - router.GET("/index/:type", serveDevfileIndexV1WithType) - router.GET("/health", serveHealthCheck) - router.GET("/devfiles/:name", serveDevfile) - router.GET("/devfiles/:name/:version", serveDevfileWithVersion) - router.GET("/devfiles/:name/starter-projects/:starterProjectName", serveDevfileStarterProject) - router.GET("/devfiles/:name/:version/starter-projects/:starterProjectName", serveDevfileStarterProjectWithVersion) - - // Registry REST APIs for index v2 - router.GET("/v2index", serveDevfileIndexV2) - router.GET("/v2index/:type", serveDevfileIndexV2WithType) + // Register Devfile Registry REST APIs and use OpenAPI validator middleware + router = RegisterHandlersWithOptions(router, server, GinServerOptions{ + Middlewares: []MiddlewareFunc{ + func(c *gin.Context) { + oapiMiddleware.OapiRequestValidator(swagger)(c) + }, + }, + }) // Set up a simple proxy for /v2 endpoints // Only allow HEAD and GET requests - router.HEAD("/v2/*proxyPath", ociServerProxy) - 
router.GET("/v2/*proxyPath", ociServerProxy) + router.HEAD("/v2/*proxyPath", ServeOciProxy) + router.GET("/v2/*proxyPath", ServeOciProxy) // Set up routes for the registry viewer - if headless { - router.GET("/viewer", serveHeadlessUI) - router.GET("/viewer/*proxyPath", serveHeadlessUI) - } else { - router.GET("/viewer", serveUI) - router.GET("/viewer/*proxyPath", serveUI) - } + router.GET("/viewer", ServeUI) + router.GET("/viewer/*proxyPath", ServeUI) // Serve static content for stacks router.Static("/stacks", stacksPath) router.Run(":8080") } - -// ociServerProxy forwards all GET requests on /v2 to the OCI registry server -func ociServerProxy(c *gin.Context) { - remote, err := url.Parse(scheme + "://" + registryService + "/v2") - if err != nil { - panic(err) - } - - proxy := httputil.NewSingleHostReverseProxy(remote) - - // Set up the request to the proxy - // Track event for telemetry for GET requests only - if enableTelemetry && c.Request.Method == http.MethodGet { - proxyPath := c.Param("proxyPath") - if proxyPath != "" { - var name string - var resource string - parts := strings.Split(proxyPath, "/") - // Check proxyPath (e.g. /devfile-catalog/java-quarkus/blobs/sha256:d913cab108c3bc1bd06ce61f1e0cdb6eea2222a7884378f7e656fa26249990b9) - if len(parts) == 5 { - name = parts[2] - resource = parts[3] - } - - //Ignore events from the registry-viewer and DevConsole since those are tracked on the client side. Ignore indirect calls from clients. - if resource == "blobs" && !util.IsWebClient(c) && !util.IsIndirectCall(c) { - user := util.GetUser(c) - client := util.GetClient(c) - - err := util.TrackEvent(analytics.Track{ - Event: eventTrackMap["download"], - UserId: user, - Context: util.SetContext(c), - Properties: analytics.NewProperties(). - Set("name", name). - Set("registry", registry). - Set("client", client), - }) - if err != nil { - log.Println(err.Error()) - } - } - } - } - - proxy.Director = func(req *http.Request) { - req.Header.Add("X-Forwarded-Host", req.Host) - req.Header.Add("X-Origin-Host", remote.Host) - req.URL.Scheme = remote.Scheme - req.URL.Host = remote.Host - } - - proxy.ServeHTTP(c.Writer, c.Request) -} diff --git a/index/server/pkg/server/types.gen.go b/index/server/pkg/server/types.gen.go new file mode 100644 index 00000000..49e2d4e0 --- /dev/null +++ b/index/server/pkg/server/types.gen.go @@ -0,0 +1,110 @@ +// +// Copyright 2023 Red Hat, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by github.com/deepmap/oapi-codegen version v1.12.4. **DO NOT EDIT** +package server + +import ( + "github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2" + "github.com/devfile/registry-support/index/generator/schema" +) + +// Architectures Optional list of processor architectures that the devfile supports, empty list suggests that the devfile can be used on any architecture +type Architectures = []string + +// Devfile Describes the structure of a cloud-native devworkspace and development environment. 
+type Devfile = v1alpha2.Devfile + +// Icon Optional devfile icon, can be a URI or a relative path in the project +type Icon = string + +// IndexParams IndexParams defines parameters for index endpoints. +type IndexParams struct { + // Arch Optional list of processor architectures that the devfile supports, empty list suggests that the devfile can be used on any architecture + Arch *Architectures `json:"arch,omitempty"` + + // Icon Optional devfile icon, can be a URI or a relative path in the project + Icon *Icon `json:"icon,omitempty"` +} + +// IndexSchema The index file schema +type IndexSchema = schema.Schema + +// ArchParam Optional list of processor architectures that the devfile supports, empty list suggests that the devfile can be used on any architecture +type ArchParam = Architectures + +// IconParam Optional devfile icon, can be a URI or a relative path in the project +type IconParam = Icon + +// DevfileErrorResponse defines model for devfileErrorResponse. +type DevfileErrorResponse struct { + Error *string `json:"error,omitempty"` + Status *string `json:"status,omitempty"` +} + +// DevfileNotFoundResponse defines model for devfileNotFoundResponse. +type DevfileNotFoundResponse struct { + Status *string `json:"status,omitempty"` +} + +// DevfileResponse Describes the structure of a cloud-native devworkspace and development environment. +type DevfileResponse = Devfile + +// HealthResponse defines model for healthResponse. +type HealthResponse struct { + Message string `json:"message"` +} + +// IndexResponse The index file schema +type IndexResponse = IndexSchema + +// V2IndexResponse defines model for v2IndexResponse. +type V2IndexResponse = schema.Schema + +// ServeDevfileIndexV1Params defines parameters for ServeDevfileIndexV1. +type ServeDevfileIndexV1Params struct { + // Arch The target architecture filter + Arch *ArchParam `form:"arch,omitempty" json:"arch,omitempty"` + + // Icon The icon type filter + Icon *IconParam `form:"icon,omitempty" json:"icon,omitempty"` +} + +// ServeDevfileIndexV1WithTypeParams defines parameters for ServeDevfileIndexV1WithType. +type ServeDevfileIndexV1WithTypeParams struct { + // Arch The target architecture filter + Arch *ArchParam `form:"arch,omitempty" json:"arch,omitempty"` + + // Icon The icon type filter + Icon *IconParam `form:"icon,omitempty" json:"icon,omitempty"` +} + +// ServeDevfileIndexV2Params defines parameters for ServeDevfileIndexV2. +type ServeDevfileIndexV2Params struct { + // Arch The target architecture filter + Arch *ArchParam `form:"arch,omitempty" json:"arch,omitempty"` + + // Icon The icon type filter + Icon *IconParam `form:"icon,omitempty" json:"icon,omitempty"` +} + +// ServeDevfileIndexV2WithTypeParams defines parameters for ServeDevfileIndexV2WithType. 
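Because each generated per-endpoint params struct mirrors IndexParams field for field (the aliases above share the same underlying types), the handlers in endpoint.go move between them with a plain type conversion. A small illustrative sketch, with hypothetical filter values and the same package assumed:

    // Sketch only: per-endpoint params convert directly to the shared IndexParams.
    arch := Architectures{"amd64", "arm64"} // hypothetical architecture filter
    icon := "base64"                        // hypothetical icon type filter
    v2 := ServeDevfileIndexV2Params{Arch: &arch, Icon: &icon}
    shared := IndexParams(v2) // legal: identical field names and underlying types
    _ = shared
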
+type ServeDevfileIndexV2WithTypeParams struct { + // Arch The target architecture filter + Arch *ArchParam `form:"arch,omitempty" json:"arch,omitempty"` + + // Icon The icon type filter + Icon *IconParam `form:"icon,omitempty" json:"icon,omitempty"` +} diff --git a/index/server/swagger.yaml b/index/server/swagger.yaml deleted file mode 100644 index 5b2de2ae..00000000 --- a/index/server/swagger.yaml +++ /dev/null @@ -1,2728 +0,0 @@ -consumes: -- application/json -- application/yaml -definitions: - ApplyCommand: - properties: - component: - description: Describes component that will be applied - type: string - x-go-name: Component - group: - $ref: '#/definitions/CommandGroup' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ApplyCommandParentOverride: - properties: - component: - description: |- - +optional - Describes component that will be applied - type: string - x-go-name: Component - group: - $ref: '#/definitions/CommandGroupParentOverride' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ApplyCommandPluginOverride: - properties: - component: - description: |- - +optional - Describes component that will be applied - type: string - x-go-name: Component - group: - $ref: '#/definitions/CommandGroupPluginOverride' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ApplyCommandPluginOverrideParentOverride: - properties: - component: - description: |- - +optional - Describes component that will be applied - type: string - x-go-name: Component - group: - $ref: '#/definitions/CommandGroupPluginOverrideParentOverride' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - Attributes: - additionalProperties: - $ref: '#/definitions/JSON' - description: |- - Attributes provides a way to add a map of arbitrary YAML/JSON - objects. - +kubebuilder:validation:Type=object - +kubebuilder:validation:XPreserveUnknownFields - type: object - x-go-package: github.com/devfile/api/v2/pkg/attributes - CheckoutFrom: - properties: - remote: - description: |- - The remote name should be used as init. Required if there are more than one remote configured - +optional - type: string - x-go-name: Remote - revision: - description: |- - The revision to checkout from. Should be branch name, tag or commit id. - Default branch is used if missing or specified revision is not found. - +optional - type: string - x-go-name: Revision - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CheckoutFromParentOverride: - properties: - remote: - description: |- - The remote name should be used as init. 
Required if there are more than one remote configured - +optional - type: string - x-go-name: Remote - revision: - description: |- - The revision to checkout from. Should be branch name, tag or commit id. - Default branch is used if missing or specified revision is not found. - +optional - type: string - x-go-name: Revision - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - Command: - properties: - apply: - $ref: '#/definitions/ApplyCommand' - attributes: - $ref: '#/definitions/Attributes' - commandType: - $ref: '#/definitions/CommandType' - composite: - $ref: '#/definitions/CompositeCommand' - custom: - $ref: '#/definitions/CustomCommand' - exec: - $ref: '#/definitions/ExecCommand' - id: - description: |- - Mandatory identifier that allows referencing - this command in composite commands, from - a parent, or in events. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Id - vscodeLaunch: - $ref: '#/definitions/VscodeConfigurationCommand' - vscodeTask: - $ref: '#/definitions/VscodeConfigurationCommand' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroup: - properties: - isDefault: - description: |- - +optional - Identifies the default command for a given group kind - type: boolean - x-go-name: IsDefault - kind: - $ref: '#/definitions/CommandGroupKind' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroupKind: - description: +kubebuilder:validation:Enum=build;run;test;debug - title: CommandGroupKind describes the kind of command group. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroupKindParentOverride: - description: +kubebuilder:validation:Enum=build;run;test;debug - title: CommandGroupKind describes the kind of command group. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroupKindPluginOverride: - description: +kubebuilder:validation:Enum=build;run;test;debug - title: CommandGroupKind describes the kind of command group. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroupKindPluginOverrideParentOverride: - description: +kubebuilder:validation:Enum=build;run;test;debug - title: CommandGroupKind describes the kind of command group. 
- type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroupParentOverride: - properties: - isDefault: - description: |- - +optional - Identifies the default command for a given group kind - type: boolean - x-go-name: IsDefault - kind: - $ref: '#/definitions/CommandGroupKindParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroupPluginOverride: - properties: - isDefault: - description: |- - +optional - Identifies the default command for a given group kind - type: boolean - x-go-name: IsDefault - kind: - $ref: '#/definitions/CommandGroupKindPluginOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandGroupPluginOverrideParentOverride: - properties: - isDefault: - description: |- - +optional - Identifies the default command for a given group kind - type: boolean - x-go-name: IsDefault - kind: - $ref: '#/definitions/CommandGroupKindPluginOverrideParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandParentOverride: - properties: - apply: - $ref: '#/definitions/ApplyCommandParentOverride' - attributes: - $ref: '#/definitions/Attributes' - commandType: - $ref: '#/definitions/CommandTypeParentOverride' - composite: - $ref: '#/definitions/CompositeCommandParentOverride' - exec: - $ref: '#/definitions/ExecCommandParentOverride' - id: - description: |- - Mandatory identifier that allows referencing - this command in composite commands, from - a parent, or in events. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Id - vscodeLaunch: - $ref: '#/definitions/VscodeConfigurationCommandParentOverride' - vscodeTask: - $ref: '#/definitions/VscodeConfigurationCommandParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandPluginOverride: - properties: - apply: - $ref: '#/definitions/ApplyCommandPluginOverride' - attributes: - $ref: '#/definitions/Attributes' - commandType: - $ref: '#/definitions/CommandTypePluginOverride' - composite: - $ref: '#/definitions/CompositeCommandPluginOverride' - exec: - $ref: '#/definitions/ExecCommandPluginOverride' - id: - description: |- - Mandatory identifier that allows referencing - this command in composite commands, from - a parent, or in events. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Id - vscodeLaunch: - $ref: '#/definitions/VscodeConfigurationCommandPluginOverride' - vscodeTask: - $ref: '#/definitions/VscodeConfigurationCommandPluginOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandPluginOverrideParentOverride: - properties: - apply: - $ref: '#/definitions/ApplyCommandPluginOverrideParentOverride' - attributes: - $ref: '#/definitions/Attributes' - commandType: - $ref: '#/definitions/CommandTypePluginOverrideParentOverride' - composite: - $ref: '#/definitions/CompositeCommandPluginOverrideParentOverride' - exec: - $ref: '#/definitions/ExecCommandPluginOverrideParentOverride' - id: - description: |- - Mandatory identifier that allows referencing - this command in composite commands, from - a parent, or in events. 
- +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Id - vscodeLaunch: - $ref: '#/definitions/VscodeConfigurationCommandPluginOverrideParentOverride' - vscodeTask: - $ref: '#/definitions/VscodeConfigurationCommandPluginOverrideParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandType: - description: |- - Only one of the following command type may be specified. - +kubebuilder:validation:Enum=Exec;Apply;VscodeTask;VscodeLaunch;Composite;Custom - title: CommandType describes the type of command. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandTypeParentOverride: - description: Only one of the following command type may be specified. - title: CommandType describes the type of command. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandTypePluginOverride: - description: Only one of the following command type may be specified. - title: CommandType describes the type of command. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CommandTypePluginOverrideParentOverride: - description: Only one of the following command type may be specified. - title: CommandType describes the type of command. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - Component: - description: +k8s:openapi-gen=true - properties: - attributes: - $ref: '#/definitions/Attributes' - componentType: - $ref: '#/definitions/ComponentType' - container: - $ref: '#/definitions/ContainerComponent' - custom: - $ref: '#/definitions/CustomComponent' - kubernetes: - $ref: '#/definitions/KubernetesComponent' - name: - description: |- - Mandatory name that allows referencing the component - from other elements (such as commands) or from an external - devfile that may reference this component through a parent or a plugin. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - openshift: - $ref: '#/definitions/OpenshiftComponent' - plugin: - $ref: '#/definitions/PluginComponent' - volume: - $ref: '#/definitions/VolumeComponent' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ComponentParentOverride: - description: +k8s:openapi-gen=true - properties: - attributes: - $ref: '#/definitions/Attributes' - componentType: - $ref: '#/definitions/ComponentTypeParentOverride' - container: - $ref: '#/definitions/ContainerComponentParentOverride' - kubernetes: - $ref: '#/definitions/KubernetesComponentParentOverride' - name: - description: |- - Mandatory name that allows referencing the component - from other elements (such as commands) or from an external - devfile that may reference this component through a parent or a plugin. 
- +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - openshift: - $ref: '#/definitions/OpenshiftComponentParentOverride' - plugin: - $ref: '#/definitions/PluginComponentParentOverride' - volume: - $ref: '#/definitions/VolumeComponentParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ComponentPluginOverride: - description: +k8s:openapi-gen=true - properties: - attributes: - $ref: '#/definitions/Attributes' - componentType: - $ref: '#/definitions/ComponentTypePluginOverride' - container: - $ref: '#/definitions/ContainerComponentPluginOverride' - kubernetes: - $ref: '#/definitions/KubernetesComponentPluginOverride' - name: - description: |- - Mandatory name that allows referencing the component - from other elements (such as commands) or from an external - devfile that may reference this component through a parent or a plugin. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - openshift: - $ref: '#/definitions/OpenshiftComponentPluginOverride' - volume: - $ref: '#/definitions/VolumeComponentPluginOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ComponentPluginOverrideParentOverride: - description: +k8s:openapi-gen=true - properties: - attributes: - $ref: '#/definitions/Attributes' - componentType: - $ref: '#/definitions/ComponentTypePluginOverrideParentOverride' - container: - $ref: '#/definitions/ContainerComponentPluginOverrideParentOverride' - kubernetes: - $ref: '#/definitions/KubernetesComponentPluginOverrideParentOverride' - name: - description: |- - Mandatory name that allows referencing the component - from other elements (such as commands) or from an external - devfile that may reference this component through a parent or a plugin. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - openshift: - $ref: '#/definitions/OpenshiftComponentPluginOverrideParentOverride' - volume: - $ref: '#/definitions/VolumeComponentPluginOverrideParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ComponentType: - description: |- - Only one of the following component type may be specified. - +kubebuilder:validation:Enum=Container;Kubernetes;Openshift;Volume;Plugin;Custom - title: ComponentType describes the type of component. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ComponentTypeParentOverride: - description: Only one of the following component type may be specified. - title: ComponentType describes the type of component. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ComponentTypePluginOverride: - description: Only one of the following component type may be specified. - title: ComponentType describes the type of component. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ComponentTypePluginOverrideParentOverride: - description: Only one of the following component type may be specified. - title: ComponentType describes the type of component. 
- type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CompositeCommand: - properties: - commands: - description: The commands that comprise this composite command - items: - type: string - type: array - x-go-name: Commands - group: - $ref: '#/definitions/CommandGroup' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - parallel: - description: |- - Indicates if the sub-commands should be executed concurrently - +optional - type: boolean - x-go-name: Parallel - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CompositeCommandParentOverride: - properties: - commands: - description: The commands that comprise this composite command - items: - type: string - type: array - x-go-name: Commands - group: - $ref: '#/definitions/CommandGroupParentOverride' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - parallel: - description: |- - Indicates if the sub-commands should be executed concurrently - +optional - type: boolean - x-go-name: Parallel - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CompositeCommandPluginOverride: - properties: - commands: - description: The commands that comprise this composite command - items: - type: string - type: array - x-go-name: Commands - group: - $ref: '#/definitions/CommandGroupPluginOverride' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - parallel: - description: |- - Indicates if the sub-commands should be executed concurrently - +optional - type: boolean - x-go-name: Parallel - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CompositeCommandPluginOverrideParentOverride: - properties: - commands: - description: The commands that comprise this composite command - items: - type: string - type: array - x-go-name: Commands - group: - $ref: '#/definitions/CommandGroupPluginOverrideParentOverride' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - parallel: - description: |- - Indicates if the sub-commands should be executed concurrently - +optional - type: boolean - x-go-name: Parallel - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ContainerComponent: - description: Component that allows the developer to add a configured container - into his workspace - properties: - args: - description: |- - The arguments to supply to the command running the dockerimage component. The arguments are supplied either to the default command provided in the image or to the overridden command. - - Defaults to an empty array, meaning use whatever is defined in the image. - +optional - items: - type: string - type: array - x-go-name: Args - command: - description: |- - The command to run in the dockerimage component instead of the default one provided in the image. - - Defaults to an empty array, meaning use whatever is defined in the image. 
- +optional - items: - type: string - type: array - x-go-name: Command - dedicatedPod: - description: |- - Specify if a container should run in its own separated pod, - instead of running as part of the main development environment pod. - - Default value is `false` - +optional - type: boolean - x-go-name: DedicatedPod - endpoints: - items: - $ref: '#/definitions/Endpoint' - type: array - x-go-name: Endpoints - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Environment variables used in this container. - - The following variables are reserved and cannot be overridden via env: - - `$PROJECTS_ROOT` - - `$PROJECT_SOURCE` - items: - $ref: '#/definitions/EnvVar' - type: array - x-go-name: Env - image: - type: string - x-go-name: Image - memoryLimit: - description: +optional - type: string - x-go-name: MemoryLimit - mountSources: - description: |- - Toggles whether or not the project source code should - be mounted in the component. - - Defaults to true for all component types except plugins and components that set `dedicatedPod` to true. - +optional - type: boolean - x-go-name: MountSources - sourceMapping: - description: |- - Optional specification of the path in the container where - project sources should be transferred/mounted when `mountSources` is `true`. - When omitted, the default value of /projects is used. - +optional - +kubebuilder:default=/projects - type: string - x-go-name: SourceMapping - volumeMounts: - description: |- - +optional - List of volumes mounts that should be mounted is this container. - items: - $ref: '#/definitions/VolumeMount' - type: array - x-go-name: VolumeMounts - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ContainerComponentParentOverride: - description: Component that allows the developer to add a configured container - into his workspace - properties: - args: - description: |- - The arguments to supply to the command running the dockerimage component. The arguments are supplied either to the default command provided in the image or to the overridden command. - - Defaults to an empty array, meaning use whatever is defined in the image. - +optional - items: - type: string - type: array - x-go-name: Args - command: - description: |- - The command to run in the dockerimage component instead of the default one provided in the image. - - Defaults to an empty array, meaning use whatever is defined in the image. - +optional - items: - type: string - type: array - x-go-name: Command - dedicatedPod: - description: |- - Specify if a container should run in its own separated pod, - instead of running as part of the main development environment pod. - - Default value is `false` - +optional - type: boolean - x-go-name: DedicatedPod - endpoints: - items: - $ref: '#/definitions/EndpointParentOverride' - type: array - x-go-name: Endpoints - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Environment variables used in this container. - - The following variables are reserved and cannot be overridden via env: - - `$PROJECTS_ROOT` - - `$PROJECT_SOURCE` - items: - $ref: '#/definitions/EnvVarParentOverride' - type: array - x-go-name: Env - image: - description: +optional - type: string - x-go-name: Image - memoryLimit: - description: +optional - type: string - x-go-name: MemoryLimit - mountSources: - description: |- - Toggles whether or not the project source code should - be mounted in the component. 
- - Defaults to true for all component types except plugins and components that set `dedicatedPod` to true. - +optional - type: boolean - x-go-name: MountSources - sourceMapping: - description: |- - Optional specification of the path in the container where - project sources should be transferred/mounted when `mountSources` is `true`. - When omitted, the default value of /projects is used. - +optional - type: string - x-go-name: SourceMapping - volumeMounts: - description: |- - +optional - List of volumes mounts that should be mounted is this container. - items: - $ref: '#/definitions/VolumeMountParentOverride' - type: array - x-go-name: VolumeMounts - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ContainerComponentPluginOverride: - description: Component that allows the developer to add a configured container - into his workspace - properties: - args: - description: |- - The arguments to supply to the command running the dockerimage component. The arguments are supplied either to the default command provided in the image or to the overridden command. - - Defaults to an empty array, meaning use whatever is defined in the image. - +optional - items: - type: string - type: array - x-go-name: Args - command: - description: |- - The command to run in the dockerimage component instead of the default one provided in the image. - - Defaults to an empty array, meaning use whatever is defined in the image. - +optional - items: - type: string - type: array - x-go-name: Command - dedicatedPod: - description: |- - Specify if a container should run in its own separated pod, - instead of running as part of the main development environment pod. - - Default value is `false` - +optional - type: boolean - x-go-name: DedicatedPod - endpoints: - items: - $ref: '#/definitions/EndpointPluginOverride' - type: array - x-go-name: Endpoints - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Environment variables used in this container. - - The following variables are reserved and cannot be overridden via env: - - `$PROJECTS_ROOT` - - `$PROJECT_SOURCE` - items: - $ref: '#/definitions/EnvVarPluginOverride' - type: array - x-go-name: Env - image: - description: +optional - type: string - x-go-name: Image - memoryLimit: - description: +optional - type: string - x-go-name: MemoryLimit - mountSources: - description: |- - Toggles whether or not the project source code should - be mounted in the component. - - Defaults to true for all component types except plugins and components that set `dedicatedPod` to true. - +optional - type: boolean - x-go-name: MountSources - sourceMapping: - description: |- - Optional specification of the path in the container where - project sources should be transferred/mounted when `mountSources` is `true`. - When omitted, the default value of /projects is used. - +optional - type: string - x-go-name: SourceMapping - volumeMounts: - description: |- - +optional - List of volumes mounts that should be mounted is this container. - items: - $ref: '#/definitions/VolumeMountPluginOverride' - type: array - x-go-name: VolumeMounts - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ContainerComponentPluginOverrideParentOverride: - description: Component that allows the developer to add a configured container - into his workspace - properties: - args: - description: |- - The arguments to supply to the command running the dockerimage component. 
The arguments are supplied either to the default command provided in the image or to the overridden command. - - Defaults to an empty array, meaning use whatever is defined in the image. - +optional - items: - type: string - type: array - x-go-name: Args - command: - description: |- - The command to run in the dockerimage component instead of the default one provided in the image. - - Defaults to an empty array, meaning use whatever is defined in the image. - +optional - items: - type: string - type: array - x-go-name: Command - dedicatedPod: - description: |- - Specify if a container should run in its own separated pod, - instead of running as part of the main development environment pod. - - Default value is `false` - +optional - type: boolean - x-go-name: DedicatedPod - endpoints: - items: - $ref: '#/definitions/EndpointPluginOverrideParentOverride' - type: array - x-go-name: Endpoints - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Environment variables used in this container. - - The following variables are reserved and cannot be overridden via env: - - `$PROJECTS_ROOT` - - `$PROJECT_SOURCE` - items: - $ref: '#/definitions/EnvVarPluginOverrideParentOverride' - type: array - x-go-name: Env - image: - description: +optional - type: string - x-go-name: Image - memoryLimit: - description: +optional - type: string - x-go-name: MemoryLimit - mountSources: - description: |- - Toggles whether or not the project source code should - be mounted in the component. - - Defaults to true for all component types except plugins and components that set `dedicatedPod` to true. - +optional - type: boolean - x-go-name: MountSources - sourceMapping: - description: |- - Optional specification of the path in the container where - project sources should be transferred/mounted when `mountSources` is `true`. - When omitted, the default value of /projects is used. - +optional - type: string - x-go-name: SourceMapping - volumeMounts: - description: |- - +optional - List of volumes mounts that should be mounted is this container. 
- items: - $ref: '#/definitions/VolumeMountPluginOverrideParentOverride' - type: array - x-go-name: VolumeMounts - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CustomCommand: - properties: - commandClass: - description: |- - Class of command that the associated implementation component - should use to process this command with the appropriate logic - type: string - x-go-name: CommandClass - embeddedResource: - $ref: '#/definitions/RawExtension' - group: - $ref: '#/definitions/CommandGroup' - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CustomComponent: - properties: - componentClass: - description: |- - Class of component that the associated implementation controller - should use to process this command with the appropriate logic - type: string - x-go-name: ComponentClass - embeddedResource: - $ref: '#/definitions/RawExtension' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - CustomProjectSource: - properties: - embeddedResource: - $ref: '#/definitions/RawExtension' - projectSourceClass: - type: string - x-go-name: ProjectSourceClass - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - Devfile: - description: |- - +k8s:deepcopy-gen=false - +devfile:jsonschema:generate:omitCustomUnionMembers=true - properties: - commands: - description: |- - Predefined, ready-to-use, workspace-related commands - +optional - +patchMergeKey=id - +patchStrategy=merge - +devfile:overrides:include:description=Overrides of commands encapsulated in a parent devfile or a plugin. - +devfile:toplevellist - items: - $ref: '#/definitions/Command' - type: array - x-go-name: Commands - components: - description: |- - List of the workspace components, such as editor and plugins, - user-provided containers, or other types of components - +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:overrides:include:description=Overrides of components encapsulated in a parent devfile or a plugin. - +devfile:toplevellist - items: - $ref: '#/definitions/Component' - type: array - x-go-name: Components - events: - $ref: '#/definitions/Events' - metadata: - $ref: '#/definitions/DevfileMetadata' - parent: - $ref: '#/definitions/Parent' - projects: - description: |- - Projects worked on in the workspace, containing names and sources locations - +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:overrides:include:omitInPlugin=true,description=Overrides of projects encapsulated in a parent devfile. - +devfile:toplevellist - items: - $ref: '#/definitions/Project' - type: array - x-go-name: Projects - schemaVersion: - description: |- - Devfile schema version - +kubebuilder:validation:Pattern=^([2-9])\.([0-9]+)\.([0-9]+)(\-[0-9a-z-]+(\.[0-9a-z-]+)*)?(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$ - type: string - x-go-name: SchemaVersion - starterProjects: - description: |- - StarterProjects is a project that can be used as a starting point when bootstrapping new projects - +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:overrides:include:omitInPlugin=true,description=Overrides of starterProjects encapsulated in a parent devfile. 
- +devfile:toplevellist - items: - $ref: '#/definitions/StarterProject' - type: array - x-go-name: StarterProjects - title: Devfile describes the structure of a cloud-native workspace and development - environment. - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - DevfileMetadata: - properties: - attributes: - $ref: '#/definitions/Attributes' - description: - description: |- - Optional devfile description - +optional - type: string - x-go-name: Description - displayName: - description: |- - Optional devfile display name - +optional - type: string - x-go-name: DisplayName - globalMemoryLimit: - description: |- - Optional devfile global memory limit - +optional - type: string - x-go-name: GlobalMemoryLimit - icon: - description: |- - Optional devfile icon - +optional - type: string - x-go-name: Icon - name: - description: |- - Optional devfile name - +optional - type: string - x-go-name: Name - tags: - description: |- - Optional devfile tags - +optional - items: - type: string - type: array - x-go-name: Tags - version: - description: |- - Optional semver-compatible version - +optional - +kubebuilder:validation:Pattern=^([0-9]+)\.([0-9]+)\.([0-9]+)(\-[0-9a-z-]+(\.[0-9a-z-]+)*)?(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$ - type: string - x-go-name: Version - type: object - x-go-package: github.com/devfile/api/v2/pkg/devfile - Endpoint: - properties: - attributes: - $ref: '#/definitions/Attributes' - exposure: - $ref: '#/definitions/EndpointExposure' - name: - description: |- - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - Path of the endpoint URL - +optional - type: string - x-go-name: Path - protocol: - $ref: '#/definitions/EndpointProtocol' - secure: - description: |- - Describes whether the endpoint should be secured and protected by some - authentication process. This requires a protocol of `https` or `wss`. - +optional - type: boolean - x-go-name: Secure - targetPort: - format: int64 - type: integer - x-go-name: TargetPort - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointExposure: - description: |- - Only one of the following exposures may be specified: public, internal, none. - +kubebuilder:validation:Enum=public;internal;none - title: EndpointExposure describes the way an endpoint is exposed on the network. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointExposureParentOverride: - description: |- - Only one of the following exposures may be specified: public, internal, none. - +kubebuilder:validation:Enum=public;internal;none - title: EndpointExposure describes the way an endpoint is exposed on the network. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointExposurePluginOverride: - description: |- - Only one of the following exposures may be specified: public, internal, none. - +kubebuilder:validation:Enum=public;internal;none - title: EndpointExposure describes the way an endpoint is exposed on the network. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointExposurePluginOverrideParentOverride: - description: |- - Only one of the following exposures may be specified: public, internal, none. 
- +kubebuilder:validation:Enum=public;internal;none - title: EndpointExposure describes the way an endpoint is exposed on the network. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointParentOverride: - properties: - attributes: - $ref: '#/definitions/Attributes' - exposure: - $ref: '#/definitions/EndpointExposureParentOverride' - name: - description: |- - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - Path of the endpoint URL - +optional - type: string - x-go-name: Path - protocol: - $ref: '#/definitions/EndpointProtocolParentOverride' - secure: - description: |- - Describes whether the endpoint should be secured and protected by some - authentication process. This requires a protocol of `https` or `wss`. - +optional - type: boolean - x-go-name: Secure - targetPort: - description: +optional - format: int64 - type: integer - x-go-name: TargetPort - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointPluginOverride: - properties: - attributes: - $ref: '#/definitions/Attributes' - exposure: - $ref: '#/definitions/EndpointExposurePluginOverride' - name: - description: |- - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - Path of the endpoint URL - +optional - type: string - x-go-name: Path - protocol: - $ref: '#/definitions/EndpointProtocolPluginOverride' - secure: - description: |- - Describes whether the endpoint should be secured and protected by some - authentication process. This requires a protocol of `https` or `wss`. - +optional - type: boolean - x-go-name: Secure - targetPort: - description: +optional - format: int64 - type: integer - x-go-name: TargetPort - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointPluginOverrideParentOverride: - properties: - attributes: - $ref: '#/definitions/Attributes' - exposure: - $ref: '#/definitions/EndpointExposurePluginOverrideParentOverride' - name: - description: |- - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - Path of the endpoint URL - +optional - type: string - x-go-name: Path - protocol: - $ref: '#/definitions/EndpointProtocolPluginOverrideParentOverride' - secure: - description: |- - Describes whether the endpoint should be secured and protected by some - authentication process. This requires a protocol of `https` or `wss`. - +optional - type: boolean - x-go-name: Secure - targetPort: - description: +optional - format: int64 - type: integer - x-go-name: TargetPort - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointProtocol: - description: |- - Only one of the following protocols may be specified: http, ws, tcp, udp. - +kubebuilder:validation:Enum=http;https;ws;wss;tcp;udp - title: EndpointProtocol defines the application and transport protocols of the - traffic that will go through this endpoint. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointProtocolParentOverride: - description: |- - Only one of the following protocols may be specified: http, ws, tcp, udp. 
- +kubebuilder:validation:Enum=http;https;ws;wss;tcp;udp - title: EndpointProtocol defines the application and transport protocols of the - traffic that will go through this endpoint. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointProtocolPluginOverride: - description: |- - Only one of the following protocols may be specified: http, ws, tcp, udp. - +kubebuilder:validation:Enum=http;https;ws;wss;tcp;udp - title: EndpointProtocol defines the application and transport protocols of the - traffic that will go through this endpoint. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EndpointProtocolPluginOverrideParentOverride: - description: |- - Only one of the following protocols may be specified: http, ws, tcp, udp. - +kubebuilder:validation:Enum=http;https;ws;wss;tcp;udp - title: EndpointProtocol defines the application and transport protocols of the - traffic that will go through this endpoint. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EnvVar: - properties: - name: - type: string - x-go-name: Name - value: - type: string - x-go-name: Value - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EnvVarParentOverride: - properties: - name: - type: string - x-go-name: Name - value: - description: +optional - type: string - x-go-name: Value - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EnvVarPluginOverride: - properties: - name: - type: string - x-go-name: Name - value: - description: +optional - type: string - x-go-name: Value - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - EnvVarPluginOverrideParentOverride: - properties: - name: - type: string - x-go-name: Name - value: - description: +optional - type: string - x-go-name: Value - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - Events: - properties: - postStart: - description: |- - IDs of commands that should be executed after the workspace is completely started. - In the case of Che-Theia, these commands should be executed after all plugins and extensions have started, including project cloning. - This means that those commands are not triggered until the user opens the IDE in his browser. - +optional - items: - type: string - type: array - x-go-name: PostStart - postStop: - description: |- - +optional - IDs of commands that should be executed after stopping the workspace. - items: - type: string - type: array - x-go-name: PostStop - preStart: - description: |- - IDs of commands that should be executed before the workspace start. - Kubernetes-wise, these commands would typically be executed in init containers of the workspace POD. - +optional - items: - type: string - type: array - x-go-name: PreStart - preStop: - description: |- - +optional - IDs of commands that should be executed before stopping the workspace. - items: - type: string - type: array - x-go-name: PreStop - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ExecCommand: - properties: - commandLine: - description: |- - The actual command-line string - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). 
If there are multiple projects, this will point to the directory of the first one. - type: string - x-go-name: CommandLine - component: - description: Describes component to which given action relates - type: string - x-go-name: Component - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Optional list of environment variables that have to be set - before running the command - items: - $ref: '#/definitions/EnvVar' - type: array - x-go-name: Env - group: - $ref: '#/definitions/CommandGroup' - hotReloadCapable: - description: |- - +optional - Whether the command is capable to reload itself when source code changes. - If set to `true` the command won't be restarted and it is expected to handle file changes on its own. - - Default value is `false` - type: boolean - x-go-name: HotReloadCapable - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - workingDir: - description: |- - Working directory where the command should be executed - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). If there are multiple projects, this will point to the directory of the first one. - +optional - type: string - x-go-name: WorkingDir - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ExecCommandParentOverride: - properties: - commandLine: - description: |- - +optional - The actual command-line string - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). If there are multiple projects, this will point to the directory of the first one. - type: string - x-go-name: CommandLine - component: - description: |- - +optional - Describes component to which given action relates - type: string - x-go-name: Component - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Optional list of environment variables that have to be set - before running the command - items: - $ref: '#/definitions/EnvVarParentOverride' - type: array - x-go-name: Env - group: - $ref: '#/definitions/CommandGroupParentOverride' - hotReloadCapable: - description: |- - +optional - Whether the command is capable to reload itself when source code changes. - If set to `true` the command won't be restarted and it is expected to handle file changes on its own. - - Default value is `false` - type: boolean - x-go-name: HotReloadCapable - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - workingDir: - description: |- - Working directory where the command should be executed - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). If there are multiple projects, this will point to the directory of the first one. 
- +optional - type: string - x-go-name: WorkingDir - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ExecCommandPluginOverride: - properties: - commandLine: - description: |- - +optional - The actual command-line string - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). If there are multiple projects, this will point to the directory of the first one. - type: string - x-go-name: CommandLine - component: - description: |- - +optional - Describes component to which given action relates - type: string - x-go-name: Component - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Optional list of environment variables that have to be set - before running the command - items: - $ref: '#/definitions/EnvVarPluginOverride' - type: array - x-go-name: Env - group: - $ref: '#/definitions/CommandGroupPluginOverride' - hotReloadCapable: - description: |- - +optional - Whether the command is capable to reload itself when source code changes. - If set to `true` the command won't be restarted and it is expected to handle file changes on its own. - - Default value is `false` - type: boolean - x-go-name: HotReloadCapable - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - workingDir: - description: |- - Working directory where the command should be executed - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). If there are multiple projects, this will point to the directory of the first one. - +optional - type: string - x-go-name: WorkingDir - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ExecCommandPluginOverrideParentOverride: - properties: - commandLine: - description: |- - +optional - The actual command-line string - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). If there are multiple projects, this will point to the directory of the first one. - type: string - x-go-name: CommandLine - component: - description: |- - +optional - Describes component to which given action relates - type: string - x-go-name: Component - env: - description: |- - +optional - +patchMergeKey=name - +patchStrategy=merge - Optional list of environment variables that have to be set - before running the command - items: - $ref: '#/definitions/EnvVarPluginOverrideParentOverride' - type: array - x-go-name: Env - group: - $ref: '#/definitions/CommandGroupPluginOverrideParentOverride' - hotReloadCapable: - description: |- - +optional - Whether the command is capable to reload itself when source code changes. - If set to `true` the command won't be restarted and it is expected to handle file changes on its own. 
- - Default value is `false` - type: boolean - x-go-name: HotReloadCapable - label: - description: |- - +optional - Optional label that provides a label for this command - to be used in Editor UI menus for example - type: string - x-go-name: Label - workingDir: - description: |- - Working directory where the command should be executed - - Special variables that can be used: - - `$PROJECTS_ROOT`: A path where projects sources are mounted as defined by container component's sourceMapping. - - `$PROJECT_SOURCE`: A path to a project source ($PROJECTS_ROOT/). If there are multiple projects, this will point to the directory of the first one. - +optional - type: string - x-go-name: WorkingDir - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - GitProjectSource: - properties: - checkoutFrom: - $ref: '#/definitions/CheckoutFrom' - remotes: - additionalProperties: - type: string - description: The remotes map which should be initialized in the git project. - Must have at least one remote configured - type: object - x-go-name: Remotes - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - GitProjectSourceParentOverride: - properties: - checkoutFrom: - $ref: '#/definitions/CheckoutFromParentOverride' - remotes: - additionalProperties: - type: string - description: |- - +optional - The remotes map which should be initialized in the git project. Must have at least one remote configured - type: object - x-go-name: Remotes - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - GithubProjectSource: - properties: - checkoutFrom: - $ref: '#/definitions/CheckoutFrom' - remotes: - additionalProperties: - type: string - description: The remotes map which should be initialized in the git project. - Must have at least one remote configured - type: object - x-go-name: Remotes - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - GithubProjectSourceParentOverride: - properties: - checkoutFrom: - $ref: '#/definitions/CheckoutFromParentOverride' - remotes: - additionalProperties: - type: string - description: |- - +optional - The remotes map which should be initialized in the git project. Must have at least one remote configured - type: object - x-go-name: Remotes - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ImportReferenceType: - description: |- - ImportReferenceType describes the type of location - from where the referenced template structure should be retrieved. - Only one of the following parent locations may be specified. - +kubebuilder:validation:Enum=Uri;Id;Kubernetes - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ImportReferenceTypeParentOverride: - description: |- - ImportReferenceType describes the type of location - from where the referenced template structure should be retrieved. - Only one of the following parent locations may be specified. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - JSON: - description: 'These types are supported: bool, int64, float64, string, []interface{}, - map[string]interface{} and nil.' - properties: - Raw: - items: - format: uint8 - type: integer - type: array - title: JSON represents any valid JSON value. 
- type: object - x-go-package: k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1 - K8sLikeComponentLocationType: - description: |- - K8sLikeComponentLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. - +kubebuilder:validation:Enum=Uri;Inlined - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - K8sLikeComponentLocationTypeParentOverride: - description: |- - K8sLikeComponentLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - K8sLikeComponentLocationTypePluginOverride: - description: |- - K8sLikeComponentLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - K8sLikeComponentLocationTypePluginOverrideParentOverride: - description: |- - K8sLikeComponentLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - KubernetesComponent: - description: Component that allows partly importing Kubernetes resources into - the workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/Endpoint' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationType' - uri: - description: |- - Location in a file fetched from a uri. - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - KubernetesComponentParentOverride: - description: Component that allows partly importing Kubernetes resources into - the workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/EndpointParentOverride' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationTypeParentOverride' - uri: - description: |- - Location in a file fetched from a uri. - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - KubernetesComponentPluginOverride: - description: Component that allows partly importing Kubernetes resources into - the workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/EndpointPluginOverride' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationTypePluginOverride' - uri: - description: |- - Location in a file fetched from a uri. 
- +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - KubernetesComponentPluginOverrideParentOverride: - description: Component that allows partly importing Kubernetes resources into - the workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/EndpointPluginOverrideParentOverride' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationTypePluginOverrideParentOverride' - uri: - description: |- - Location in a file fetched from a uri. - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - KubernetesCustomResourceImportReference: - properties: - name: - type: string - x-go-name: Name - namespace: - description: +optional - type: string - x-go-name: Namespace - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - KubernetesCustomResourceImportReferenceParentOverride: - properties: - name: - description: +optional - type: string - x-go-name: Name - namespace: - description: +optional - type: string - x-go-name: Namespace - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - OpenshiftComponent: - description: Component that allows partly importing Openshift resources into the - workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/Endpoint' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationType' - uri: - description: |- - Location in a file fetched from a uri. - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - OpenshiftComponentParentOverride: - description: Component that allows partly importing Openshift resources into the - workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/EndpointParentOverride' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationTypeParentOverride' - uri: - description: |- - Location in a file fetched from a uri. - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - OpenshiftComponentPluginOverride: - description: Component that allows partly importing Openshift resources into the - workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/EndpointPluginOverride' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationTypePluginOverride' - uri: - description: |- - Location in a file fetched from a uri. 
- +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - OpenshiftComponentPluginOverrideParentOverride: - description: Component that allows partly importing Openshift resources into the - workspace POD - properties: - endpoints: - items: - $ref: '#/definitions/EndpointPluginOverrideParentOverride' - type: array - x-go-name: Endpoints - inlined: - description: |- - Inlined manifest - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/K8sLikeComponentLocationTypePluginOverrideParentOverride' - uri: - description: |- - Location in a file fetched from a uri. - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - Parent: - properties: - commands: - description: |- - Overrides of commands encapsulated in a parent devfile or a plugin. - Overriding is done according to K8S strategic merge patch standard rules. - +optional - +patchMergeKey=id - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/CommandParentOverride' - type: array - x-go-name: Commands - components: - description: |- - Overrides of components encapsulated in a parent devfile or a plugin. - Overriding is done according to K8S strategic merge patch standard rules. - +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/ComponentParentOverride' - type: array - x-go-name: Components - id: - description: |- - Id in a registry that contains a Devfile yaml file - +optional - type: string - x-go-name: Id - importReferenceType: - $ref: '#/definitions/ImportReferenceType' - kubernetes: - $ref: '#/definitions/KubernetesCustomResourceImportReference' - projects: - description: |- - Overrides of projects encapsulated in a parent devfile. - Overriding is done according to K8S strategic merge patch standard rules. - +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/ProjectParentOverride' - type: array - x-go-name: Projects - registryUrl: - description: +optional - type: string - x-go-name: RegistryUrl - starterProjects: - description: |- - Overrides of starterProjects encapsulated in a parent devfile. - Overriding is done according to K8S strategic merge patch standard rules. - +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/StarterProjectParentOverride' - type: array - x-go-name: StarterProjects - uri: - description: |- - Uri of a Devfile yaml file - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - PluginComponent: - properties: - commands: - description: |- - Overrides of commands encapsulated in a parent devfile or a plugin. - Overriding is done according to K8S strategic merge patch standard rules. - +optional - +patchMergeKey=id - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/CommandPluginOverride' - type: array - x-go-name: Commands - components: - description: |- - Overrides of components encapsulated in a parent devfile or a plugin. - Overriding is done according to K8S strategic merge patch standard rules. 
- +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/ComponentPluginOverride' - type: array - x-go-name: Components - id: - description: |- - Id in a registry that contains a Devfile yaml file - +optional - type: string - x-go-name: Id - importReferenceType: - $ref: '#/definitions/ImportReferenceType' - kubernetes: - $ref: '#/definitions/KubernetesCustomResourceImportReference' - registryUrl: - description: +optional - type: string - x-go-name: RegistryUrl - uri: - description: |- - Uri of a Devfile yaml file - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - PluginComponentParentOverride: - properties: - commands: - description: |- - Overrides of commands encapsulated in a parent devfile or a plugin. - Overriding is done according to K8S strategic merge patch standard rules. - +optional - +patchMergeKey=id - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/CommandPluginOverrideParentOverride' - type: array - x-go-name: Commands - components: - description: |- - Overrides of components encapsulated in a parent devfile or a plugin. - Overriding is done according to K8S strategic merge patch standard rules. - +optional - +patchMergeKey=name - +patchStrategy=merge - +devfile:toplevellist - items: - $ref: '#/definitions/ComponentPluginOverrideParentOverride' - type: array - x-go-name: Components - id: - description: |- - Id in a registry that contains a Devfile yaml file - +optional - type: string - x-go-name: Id - importReferenceType: - $ref: '#/definitions/ImportReferenceTypeParentOverride' - kubernetes: - $ref: '#/definitions/KubernetesCustomResourceImportReferenceParentOverride' - registryUrl: - description: +optional - type: string - x-go-name: RegistryUrl - uri: - description: |- - Uri of a Devfile yaml file - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - Project: - properties: - attributes: - $ref: '#/definitions/Attributes' - clonePath: - description: |- - Path relative to the root of the projects to which this project should be cloned into. This is a unix-style relative path (i.e. uses forward slashes). The path is invalid if it is absolute or tries to escape the project root through the usage of '..'. If not specified, defaults to the project name. - +optional - type: string - x-go-name: ClonePath - custom: - $ref: '#/definitions/CustomProjectSource' - git: - $ref: '#/definitions/GitProjectSource' - github: - $ref: '#/definitions/GithubProjectSource' - name: - description: |- - Project name - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - sourceType: - $ref: '#/definitions/ProjectSourceType' - sparseCheckoutDirs: - description: |- - Populate the project sparsely with selected directories. - +optional - items: - type: string - type: array - x-go-name: SparseCheckoutDirs - zip: - $ref: '#/definitions/ZipProjectSource' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ProjectParentOverride: - properties: - attributes: - $ref: '#/definitions/Attributes' - clonePath: - description: |- - Path relative to the root of the projects to which this project should be cloned into. This is a unix-style relative path (i.e. uses forward slashes). 
The path is invalid if it is absolute or tries to escape the project root through the usage of '..'. If not specified, defaults to the project name. - +optional - type: string - x-go-name: ClonePath - git: - $ref: '#/definitions/GitProjectSourceParentOverride' - github: - $ref: '#/definitions/GithubProjectSourceParentOverride' - name: - description: |- - Project name - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - sourceType: - $ref: '#/definitions/ProjectSourceTypeParentOverride' - sparseCheckoutDirs: - description: |- - Populate the project sparsely with selected directories. - +optional - items: - type: string - type: array - x-go-name: SparseCheckoutDirs - zip: - $ref: '#/definitions/ZipProjectSourceParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ProjectSourceType: - description: |- - Only one of the following project sources may be specified. - If none of the following policies is specified, the default one - is AllowConcurrent. - +kubebuilder:validation:Enum=Git;Github;Zip;Custom - title: ProjectSourceType describes the type of Project sources. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ProjectSourceTypeParentOverride: - description: |- - Only one of the following project sources may be specified. - If none of the following policies is specified, the default one - is AllowConcurrent. - title: ProjectSourceType describes the type of Project sources. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - RawExtension: - description: |- - To use this, make a field which has RawExtension as its type in your external, versioned - struct, and Object in your internal struct. You also need to register your - various plugin types. - - Internal package: - type MyAPIObject struct { - runtime.TypeMeta `json:",inline"` - MyPlugin runtime.Object `json:"myPlugin"` - } - type PluginA struct { - AOption string `json:"aOption"` - } - - External package: - type MyAPIObject struct { - runtime.TypeMeta `json:",inline"` - MyPlugin runtime.RawExtension `json:"myPlugin"` - } - type PluginA struct { - AOption string `json:"aOption"` - } - - On the wire, the JSON will look something like this: - { - "kind":"MyAPIObject", - "apiVersion":"v1", - "myPlugin": { - "kind":"PluginA", - "aOption":"foo", - }, - } - - So what happens? Decode first uses json or yaml to unmarshal the serialized data into - your external MyAPIObject. That causes the raw JSON to be stored, but not unpacked. - The next step is to copy (using pkg/conversion) into the internal struct. The runtime - package's DefaultScheme has conversion functions installed which will unpack the - JSON stored in RawExtension, turning it into the correct object type, and storing it - in the Object. (TODO: In the case where the object is of an unknown type, a - runtime.Unknown object will be created and stored.) - - +k8s:deepcopy-gen=true - +protobuf=true - +k8s:openapi-gen=true - title: RawExtension is used to hold extensions in external versions. 
- type: object - x-go-package: k8s.io/apimachinery/pkg/runtime - Schema: - description: Schema is the index file schema - properties: - description: - type: string - x-go-name: Description - displayName: - type: string - x-go-name: DisplayName - language: - type: string - x-go-name: Language - links: - additionalProperties: - type: string - type: object - x-go-name: Links - name: - type: string - x-go-name: Name - projectType: - type: string - x-go-name: ProjectType - resources: - items: - type: string - type: array - x-go-name: Resources - starterProjects: - items: - type: string - type: array - x-go-name: StarterProjects - tags: - items: - type: string - type: array - x-go-name: Tags - type: object - x-go-package: github.com/devfile/registry-support/index/generator/schema - StarterProject: - properties: - attributes: - $ref: '#/definitions/Attributes' - custom: - $ref: '#/definitions/CustomProjectSource' - description: - description: |- - Description of a starter project - +optional - type: string - x-go-name: Description - git: - $ref: '#/definitions/GitProjectSource' - github: - $ref: '#/definitions/GithubProjectSource' - name: - description: |- - Project name - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - sourceType: - $ref: '#/definitions/ProjectSourceType' - subDir: - description: |- - Sub-directory from a starter project to be used as root for starter project. - +optional - type: string - x-go-name: SubDir - zip: - $ref: '#/definitions/ZipProjectSource' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - StarterProjectParentOverride: - properties: - attributes: - $ref: '#/definitions/Attributes' - description: - description: |- - Description of a starter project - +optional - type: string - x-go-name: Description - git: - $ref: '#/definitions/GitProjectSourceParentOverride' - github: - $ref: '#/definitions/GithubProjectSourceParentOverride' - name: - description: |- - Project name - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - sourceType: - $ref: '#/definitions/ProjectSourceTypeParentOverride' - subDir: - description: |- - Sub-directory from a starter project to be used as root for starter project. 
- +optional - type: string - x-go-name: SubDir - zip: - $ref: '#/definitions/ZipProjectSourceParentOverride' - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeComponent: - description: Component that allows the developer to declare and configure a volume - into his workspace - properties: - size: - description: |- - +optional - Size of the volume - type: string - x-go-name: Size - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeComponentParentOverride: - description: Component that allows the developer to declare and configure a volume - into his workspace - properties: - size: - description: |- - +optional - Size of the volume - type: string - x-go-name: Size - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeComponentPluginOverride: - description: Component that allows the developer to declare and configure a volume - into his workspace - properties: - size: - description: |- - +optional - Size of the volume - type: string - x-go-name: Size - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeComponentPluginOverrideParentOverride: - description: Component that allows the developer to declare and configure a volume - into his workspace - properties: - size: - description: |- - +optional - Size of the volume - type: string - x-go-name: Size - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeMount: - description: Volume that should be mounted to a component container - properties: - name: - description: |- - The volume mount name is the name of an existing `Volume` component. - If several containers mount the same volume name - then they will reuse the same volume and will be able to access to the same files. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - The path in the component container where the volume should be mounted. - If not path is mentioned, default path is the is `/`. - +optional - type: string - x-go-name: Path - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeMountParentOverride: - description: Volume that should be mounted to a component container - properties: - name: - description: |- - The volume mount name is the name of an existing `Volume` component. - If several containers mount the same volume name - then they will reuse the same volume and will be able to access to the same files. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - The path in the component container where the volume should be mounted. - If not path is mentioned, default path is the is `/`. - +optional - type: string - x-go-name: Path - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeMountPluginOverride: - description: Volume that should be mounted to a component container - properties: - name: - description: |- - The volume mount name is the name of an existing `Volume` component. - If several containers mount the same volume name - then they will reuse the same volume and will be able to access to the same files. 
- +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - The path in the component container where the volume should be mounted. - If not path is mentioned, default path is the is `/`. - +optional - type: string - x-go-name: Path - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VolumeMountPluginOverrideParentOverride: - description: Volume that should be mounted to a component container - properties: - name: - description: |- - The volume mount name is the name of an existing `Volume` component. - If several containers mount the same volume name - then they will reuse the same volume and will be able to access to the same files. - +kubebuilder:validation:Pattern=^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ - +kubebuilder:validation:MaxLength=63 - type: string - x-go-name: Name - path: - description: |- - The path in the component container where the volume should be mounted. - If not path is mentioned, default path is the is `/`. - +optional - type: string - x-go-name: Path - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommand: - properties: - group: - $ref: '#/definitions/CommandGroup' - inlined: - description: |- - Inlined content of the VsCode configuration - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/VscodeConfigurationCommandLocationType' - uri: - description: |- - Location as an absolute of relative URI - the VsCode configuration will be fetched from - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommandLocationType: - description: |- - VscodeConfigurationCommandLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. - +kubebuilder:validation:Enum=Uri;Inlined - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommandLocationTypeParentOverride: - description: |- - VscodeConfigurationCommandLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommandLocationTypePluginOverride: - description: |- - VscodeConfigurationCommandLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. - type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommandLocationTypePluginOverrideParentOverride: - description: |- - VscodeConfigurationCommandLocationType describes the type of - the location the configuration is fetched from. - Only one of the following component type may be specified. 
- type: string - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommandParentOverride: - properties: - group: - $ref: '#/definitions/CommandGroupParentOverride' - inlined: - description: |- - Inlined content of the VsCode configuration - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/VscodeConfigurationCommandLocationTypeParentOverride' - uri: - description: |- - Location as an absolute of relative URI - the VsCode configuration will be fetched from - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommandPluginOverride: - properties: - group: - $ref: '#/definitions/CommandGroupPluginOverride' - inlined: - description: |- - Inlined content of the VsCode configuration - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/VscodeConfigurationCommandLocationTypePluginOverride' - uri: - description: |- - Location as an absolute of relative URI - the VsCode configuration will be fetched from - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - VscodeConfigurationCommandPluginOverrideParentOverride: - properties: - group: - $ref: '#/definitions/CommandGroupPluginOverrideParentOverride' - inlined: - description: |- - Inlined content of the VsCode configuration - +optional - type: string - x-go-name: Inlined - locationType: - $ref: '#/definitions/VscodeConfigurationCommandLocationTypePluginOverrideParentOverride' - uri: - description: |- - Location as an absolute of relative URI - the VsCode configuration will be fetched from - +optional - type: string - x-go-name: Uri - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ZipProjectSource: - properties: - location: - description: |- - Zip project's source location address. Should be file path of the archive, e.g. file://$FILE_PATH - +required - type: string - x-go-name: Location - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 - ZipProjectSourceParentOverride: - properties: - location: - description: |- - Zip project's source location address. Should be file path of the archive, e.g. file://$FILE_PATH - +required - type: string - x-go-name: Location - type: object - x-go-package: github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2 -host: preview-devfile-registry-stage.apps.app-sre-stage-0.k3s7.p1.openshiftapps.com -info: - description: |- - Documentation of devfile registry REST API. - - The devfile registry REST API is used for interacting with devfile registry. In this documentation, the host is serving the public devfile registry, you can change it to your own host if you want to use private devfile registry. - title: Devfile registry REST API. - version: 1.0.0 -paths: - /devfiles/{stack}: - get: - description: Return the specific stack devfile content of devfile registry. - operationId: getDevfile - parameters: - - description: The stack name - in: path - name: stack - required: true - type: string - x-go-name: Stack - produces: - - application/yaml - - application/json - responses: - "200": - $ref: '#/responses/devfileResponse' - "404": - $ref: '#/responses/devfileNotFoundResponse' - "500": - $ref: '#/responses/devfileErrorResponse' - summary: Get devfile by stack name. 
- tags: - - devfile - /health: - get: - description: Return the devfile registry health status. - operationId: getStatus - produces: - - application/json - responses: - "200": - $ref: '#/responses/healthResponse' - "404": - description: ' Page not found.' - summary: Get health status. - tags: - - server - /index: - get: - description: Return the index content of devfile registry. - operationId: getIndex - produces: - - application/json - responses: - "200": - $ref: '#/responses/indexResponse' - "404": - description: ' Page not found.' - summary: Get index. - tags: - - devfile -produces: -- application/json -- application/yaml -responses: - devfileErrorResponse: - description: Failed to get the devfile. - schema: - properties: - error: - type: string - x-go-name: Error - status: - type: string - x-go-name: Status - type: object - devfileNotFoundResponse: - description: Failed to find the devfile. - schema: - properties: - status: - type: string - x-go-name: Status - type: object - devfileResponse: - description: |- - Successful operation. - - Stack devfile content. - schema: - $ref: '#/definitions/Devfile' - healthResponse: - description: |- - Successful operation. - - Health status. - schema: - properties: - message: - type: string - x-go-name: Message - type: object - indexResponse: - description: |- - Successful operation. - - Index content. - schema: - items: - $ref: '#/definitions/Schema' - type: array -schemes: -- http -- https -securityDefinitions: - basic: - type: basic -swagger: "2.0" diff --git a/index/server/templates/imports.tmpl b/index/server/templates/imports.tmpl new file mode 100644 index 00000000..a0274b71 --- /dev/null +++ b/index/server/templates/imports.tmpl @@ -0,0 +1,17 @@ +// +// Copyright 2023 Red Hat, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by {{.ModuleName}} version {{.Version}}. **DO NOT EDIT** +package {{.PackageName}} diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.editorconfig b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.editorconfig new file mode 100644 index 00000000..fbb2c8f3 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.editorconfig @@ -0,0 +1,6 @@ +[*] +end_of_line = lf +insert_final_newline = true + +[*.{cmd,bat}] +end_of_line = crlf diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitattributes b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitattributes new file mode 100644 index 00000000..9be55028 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitattributes @@ -0,0 +1,175 @@ +## AUTO-DETECT - Handle line endings automatically for files detected +## as text and leave all files detected as binary untouched. +## This will handle all files NOT defined below. 
+* text=auto + +# Custom for Visual Studio +*.sln text eol=crlf +*.csproj text eol=crlf +*.vbproj text eol=crlf +*.fsproj text eol=crlf +*.dbproj text eol=crlf + +*.vcxproj text eol=crlf +*.vcxitems text eol=crlf +*.props text eol=crlf +*.filters text eol=crlf + +# Documents +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain +*.csv text +*.sql text +*.ini text + +## SOURCE CODE +*.go text eol=lf +*.c text eol=lf +*.h text eol=lf +*.bat text eol=crlf +*.cmd text eol=crlf +*.coffee text eol=lf + +*.htm text diff=html +*.html text diff=html +*.xml text diff=html +*.xhtml text diff=html + +*.js text eol=lf +*.jsx text eol=lf +*.json text eol=lf +*.ts text eol=lf + +*.css text diff=css eol=lf +*.scss text diff=css eol=lf +*.less text diff=css eol=lf +*.sass text eol=lf + +*.sh text eol=lf + +## DOCUMENTATION +*.md text eol=lf +*.txt text +AUTHORS text eol=lf +CHANGELOG text eol=lf +CHANGES text eol=lf +CONTRIBUTING text eol=lf +COPYING text eol=lf +INSTALL text eol=lf +license text eol=lf +LICENSE text eol=lf +NEWS text eol=lf +readme text eol=lf +*README* text eol=lf +TODO text eol=lf + +## TEMPLATES +*.dot text +*.ejs text +*.haml text +*.handlebars text +*.hbs text +*.hbt text +*.jade text +*.latte text +*.mustache text +*.tmpl text + +## LINTERS +.csslintrc text eol=lf +.eslintrc text eol=lf +.jscsrc text eol=lf +.jshintrc text eol=lf +.jshintignore text eol=lf +.stylelintrc text eol=lf + +## CONFIGS +*.bowerrc text eol=lf +*.cnf text +*.conf text +*.config text +.editorconfig text eol=lf +.gitattributes text eol=lf +.gitconfig text eol=lf +.gitignore text eol=lf +*.npmignore text eol=lf +*.yaml text eol=lf +*.yml text eol=lf +Makefile text eol=lf +makefile text eol=lf + +## GRAPHICS +*.ai binary +*.bmp binary +*.eps binary +*.gif binary +*.ico binary +*.jng binary +*.jp2 binary +*.jpg binary +*.jpeg binary +*.jpx binary +*.jxr binary +*.pdf binary +*.png binary +*.psb binary +*.psd binary +*.svg text +*.svgz binary +*.tif binary +*.tiff binary +*.wbmp binary +*.webp binary + +## AUDIO +*.kar binary +*.m4a binary +*.mid binary +*.midi binary +*.mp3 binary +*.ogg binary +*.ra binary + +## VIDEO +*.3gpp binary +*.3gp binary +*.as binary +*.asf binary +*.asx binary +*.fla binary +*.flv binary +*.m4v binary +*.mng binary +*.mov binary +*.mp4 binary +*.mpeg binary +*.mpg binary +*.swc binary +*.swf binary +*.webm binary + +## ARCHIVES +*.7z binary +*.gz binary +*.rar binary +*.tar binary +*.zip binary + +## FONTS +*.ttf binary +*.eot binary +*.otf binary +*.woff binary +*.woff2 binary + +## EXECUTABLES +*.exe binary +*.dll binary diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitignore b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitignore new file mode 100644 index 00000000..5efe425b --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitignore @@ -0,0 +1,11 @@ +# GoLand +/.idea/ + +/vendor/ + +/cmd/cmd.exe +/cmd/cmd + +/artifacts/ +/test/ +/cmd/test/ diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitlab-ci.yml b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitlab-ci.yml new file mode 100644 index 00000000..a0d178f9 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.gitlab-ci.yml @@ -0,0 +1,42 @@ +variables: + GOPROJ: 
"github.com/RaveNoX/go-jsonmerge" + + +stages: +- test +- build + +test: + tags: + - docker + - linux + image: golang:latest + stage: test + script: + - mkdir -p artifacts + - go test -cover -v -coverprofile="./artifacts/cover.out" ./ + - go tool cover -html="./artifacts/cover.out" -o "./artifacts/cover.htm" + - go test -cover -v -coverprofile="./artifacts/cover_cmd.out" ./cmd/jsonmerge + - go tool cover -html="./artifacts/cover_cmd.out" -o "./artifacts/cover_cmd.htm" + artifacts: + paths: + - artifacts/* + +build: + stage: build + tags: + - docker + - linux + image: golang:latest + script: + - mkdir -p artifacts + - echo "Building for Linux" + - GOOS=linux GOARCH=amd64 go build -o artifacts/jsonmerge ./cmd/jsonmerge + - echo "Building for MacOS (darwin)" + - GOOS=darwin GOARCH=amd64 go build -o artifacts/jsonmerge_darwin ./cmd/jsonmerge + - echo "Building for Windows" + - GOOS=windows GOARCH=amd64 go build -o artifacts/jsonmerge.exe ./cmd/jsonmerge + artifacts: + paths: + - artifacts/* + diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.travis.yml b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.travis.yml new file mode 100644 index 00000000..eae1a8a2 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/.travis.yml @@ -0,0 +1,19 @@ +language: go + +go: +- 1.x + +install: +- mkdir -p artifacts + +env: + - GO111MODULE=on + +script: +- go test -cover -v -coverprofile="./artifacts/cover.out" ./ +- go tool cover -html="./artifacts/cover.out" -o "./artifacts/cover.htm" +- go test -cover -v -coverprofile="./artifacts/cover_cmd.out" ./cmd/jsonmerge +- go tool cover -html="./artifacts/cover_cmd.out" -o "./artifacts/cover_cmd.htm" +- GOARCH=amd64 GOOS=linux go build -o artifacts/jsonmerge ./cmd/jsonmerge +- GOARCH=amd64 GOOS=windows go build -o artifacts/jsonmerge.exe ./cmd/jsonmerge +- GOARCH=amd64 GOOS=darwin go build -o artifacts/jsonmerge_darwin ./cmd/jsonmerge diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/LICENSE b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/LICENSE new file mode 100644 index 00000000..f23057e4 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016-2019 Artur Kraev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/README.md b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/README.md new file mode 100644 index 00000000..e11bd4c4 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/README.md @@ -0,0 +1,81 @@ +# go-jsonmerge +[![Build Status](https://travis-ci.org/RaveNoX/go-jsonmerge.svg?branch=master)](https://travis-ci.org/RaveNoX/go-jsonmerge) +[![GoDoc](https://godoc.org/github.com/RaveNoX/go-jsonmerge?status.svg)](https://godoc.org/github.com/RaveNoX/go-jsonmerge) + +GO library for merging JSON objects + +## Original document +```json +{ + "number": 1, + "string": "value", + "object": { + "number": 1, + "string": "value", + "nested object": { + "number": 2 + }, + "array": [1, 2, 3], + "partial_array": [1, 2, 3] + } +} +``` + +## Patch +```json +{ + "number": 2, + "string": "value1", + "nonexitent": "woot", + "object": { + "number": 3, + "string": "value2", + "nested object": { + "number": 4 + }, + "array": [3, 2, 1], + "partial_array": { + "1": 4 + } + } +} +``` + +## Result +```json +{ + "number": 2, + "string": "value1", + "object": { + "number": 3, + "string": "value2", + "nested object": { + "number": 4 + }, + "array": [3, 2, 1], + "partial_array": [1, 4, 3] + } +} +``` + +## Commandline Tool + +```bash +$ go get -u github.com/RaveNoX/go-jsonmerge/cmd/jsonmerge +$ jsonmerge [options] ... +# For help +$ jsonmerge -h +``` + +## Development +``` +# Install depencencies +./init.sh + +# Build +./build.sh +``` + + +## License +[MIT](./LICENSE.MD) diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.cmd b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.cmd new file mode 100644 index 00000000..ebd98767 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.cmd @@ -0,0 +1,25 @@ +@ECHO OFF +setlocal + +set GOARCH=amd64 + +cd %~dp0 +md artifacts + +echo Windows +set GOOS=windows +call go build -o artifacts\jsonmerge.exe .\cmd || goto :error + +echo Linux +set GOOS=linux +call go build -o artifacts\jsonmerge .\cmd || goto :error + +echo Darwin +set GOOS=darwin +call go build -o artifacts\jsonmerge_darwin .\cmd || goto :error + +echo Build done +exit + +:error +exit /b %errorlevel% diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.sh b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.sh new file mode 100644 index 00000000..29be8924 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/build.sh @@ -0,0 +1,19 @@ +#!/bin/sh + +set -e + +MY_DIR=$(dirname "$0") + +cd "${MY_DIR}" +mkdir -p "artifacts" + +echo "Linux" +GOARCH=amd64 GOOS=linux go build -o "artifacts/jsonmerge" ./cmd + +echo "Windows" +GOARCH=amd64 GOOS=windows go build -o "artifacts/jsonmerge.exe" ./cmd + +echo "Mac(darwin)" +GOARCH=amd64 GOOS=darwin go build -o "artifacts/jsonmerge_darwin" ./cmd + +echo "Build done" diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/doc.go b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/doc.go new file mode 100644 index 00000000..5bd6e15e --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/doc.go @@ -0,0 +1,52 @@ +// Package jsonmerge helps mergeing JSON objects +// +// For example you have this documents: +// +// original.json +// { +// "number": 1, +// "string": "value", +// "object": { +// "number": 1, +// "string": "value", +// 
"nested object": { +// "number": 2 +// }, +// "array": [1, 2, 3], +// "partial_array": [1, 2, 3] +// } +// } +// +// patch.json +// { +// "number": 2, +// "string": "value1", +// "nonexitent": "woot", +// "object": { +// "number": 3, +// "string": "value2", +// "nested object": { +// "number": 4 +// }, +// "array": [3, 2, 1], +// "partial_array": { +// "1": 4 +// } +// } +// } +// +// After merge you will have this result: +// { +// "number": 2, +// "string": "value1", +// "object": { +// "number": 3, +// "string": "value2", +// "nested object": { +// "number": 4 +// }, +// "array": [3, 2, 1], +// "partial_array": [1, 4, 3] +// } +// } +package jsonmerge diff --git a/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/merge.go b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/merge.go new file mode 100644 index 00000000..8ba052f1 --- /dev/null +++ b/index/server/vendor/github.com/apapsch/go-jsonmerge/v2/merge.go @@ -0,0 +1,167 @@ +package jsonmerge + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" +) + +// Merger describes result of merge operation and provides +// configuration. +type Merger struct { + // Errors is slice of non-critical errors of merge operations + Errors []error + // Replaced is describe replacements + // Key is path in document like + // "prop1.prop2.prop3" for object properties or + // "arr1.1.prop" for arrays + // Value is value of replacemet + Replaced map[string]interface{} + // CopyNonexistent enables setting fields into the result + // which only exist in the patch. + CopyNonexistent bool +} + +func (m *Merger) mergeValue(path []string, patch map[string]interface{}, key string, value interface{}) interface{} { + patchValue, patchHasValue := patch[key] + + if !patchHasValue { + return value + } + + _, patchValueIsObject := patchValue.(map[string]interface{}) + + path = append(path, key) + pathStr := strings.Join(path, ".") + + if _, ok := value.(map[string]interface{}); ok { + if !patchValueIsObject { + err := fmt.Errorf("patch value must be object for key \"%v\"", pathStr) + m.Errors = append(m.Errors, err) + return value + } + + return m.mergeObjects(value, patchValue, path) + } + + if _, ok := value.([]interface{}); ok && patchValueIsObject { + return m.mergeObjects(value, patchValue, path) + } + + if !reflect.DeepEqual(value, patchValue) { + m.Replaced[pathStr] = patchValue + } + + return patchValue +} + +func (m *Merger) mergeObjects(data, patch interface{}, path []string) interface{} { + if patchObject, ok := patch.(map[string]interface{}); ok { + if dataArray, ok := data.([]interface{}); ok { + ret := make([]interface{}, len(dataArray)) + + for i, val := range dataArray { + ret[i] = m.mergeValue(path, patchObject, strconv.Itoa(i), val) + } + + return ret + } else if dataObject, ok := data.(map[string]interface{}); ok { + ret := make(map[string]interface{}) + + for k, v := range dataObject { + ret[k] = m.mergeValue(path, patchObject, k, v) + } + if m.CopyNonexistent { + for k, v := range patchObject { + if _, ok := dataObject[k]; !ok { + ret[k] = v + } + } + } + + return ret + } + } + + return data +} + +// Merge merges patch document to data document +// +// Returning merged document. Result of merge operation can be +// obtained from the Merger. Result information is discarded before +// merging. 
+func (m *Merger) Merge(data, patch interface{}) interface{} { + m.Replaced = make(map[string]interface{}) + m.Errors = make([]error, 0) + return m.mergeObjects(data, patch, nil) +} + +// MergeBytesIndent merges patch document buffer to data document buffer +// +// Use prefix and indent for set indentation like in json.MarshalIndent +// +// Returning merged document buffer and error if any. +func (m *Merger) MergeBytesIndent(dataBuff, patchBuff []byte, prefix, indent string) (mergedBuff []byte, err error) { + var data, patch, merged interface{} + + err = unmarshalJSON(dataBuff, &data) + if err != nil { + err = fmt.Errorf("error in data JSON: %v", err) + return + } + + err = unmarshalJSON(patchBuff, &patch) + if err != nil { + err = fmt.Errorf("error in patch JSON: %v", err) + return + } + + merged = m.Merge(data, patch) + + mergedBuff, err = json.MarshalIndent(merged, prefix, indent) + if err != nil { + err = fmt.Errorf("error writing merged JSON: %v", err) + } + + return +} + +// MergeBytes merges patch document buffer to data document buffer +// +// Returning merged document buffer, merge info and +// error if any +func (m *Merger) MergeBytes(dataBuff, patchBuff []byte) (mergedBuff []byte, err error) { + var data, patch, merged interface{} + + err = unmarshalJSON(dataBuff, &data) + if err != nil { + err = fmt.Errorf("error in data JSON: %v", err) + return + } + + err = unmarshalJSON(patchBuff, &patch) + if err != nil { + err = fmt.Errorf("error in patch JSON: %v", err) + return + } + + merged = m.Merge(data, patch) + + mergedBuff, err = json.Marshal(merged) + if err != nil { + err = fmt.Errorf("error writing merged JSON: %v", err) + } + + return +} + +func unmarshalJSON(buff []byte, data interface{}) error { + decoder := json.NewDecoder(bytes.NewReader(buff)) + decoder.UseNumber() + + return decoder.Decode(data) +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/LICENSE b/index/server/vendor/github.com/deepmap/oapi-codegen/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/oapi_validate.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/oapi_validate.go new file mode 100644 index 00000000..82c1f52c --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/oapi_validate.go @@ -0,0 +1,196 @@ +// Copyright 2021 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
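The gin middleware whose source is added in this hunk validates each incoming request against a loaded OpenAPI document and rejects non-conforming requests with HTTP 400. A minimal, hypothetical wiring sketch follows; the spec file name, port, and blanket use of the middleware are illustrative (per this PR, the index server applies validation only to its OpenAPI routes), and the import paths mirror the vendored directories:

```go
package main

import (
	"log"

	middleware "github.com/deepmap/oapi-codegen/pkg/gin-middleware"
	"github.com/gin-gonic/gin"
)

func main() {
	// Build a validator from the spec on disk; OapiValidatorFromYamlFile is
	// defined in the vendored middleware source that follows.
	validator, err := middleware.OapiValidatorFromYamlFile("openapi.yaml")
	if err != nil {
		log.Fatalf("loading OpenAPI spec: %v", err)
	}

	r := gin.Default()
	r.Use(validator) // requests that do not match the spec are rejected with 400

	// Generated handler bindings would be registered here.

	log.Fatal(r.Run(":8080"))
}
```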
+ +package middleware + +import ( + "context" + "errors" + "fmt" + "net/http" + "os" + "strings" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/openapi3filter" + "github.com/getkin/kin-openapi/routers" + "github.com/getkin/kin-openapi/routers/gorillamux" + "github.com/gin-gonic/gin" +) + +const ( + GinContextKey = "oapi-codegen/gin-context" + UserDataKey = "oapi-codegen/user-data" +) + +// OapiValidatorFromYamlFile creates a validator middleware from a YAML file path +func OapiValidatorFromYamlFile(path string) (gin.HandlerFunc, error) { + data, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("error reading %s: %s", path, err) + } + + swagger, err := openapi3.NewLoader().LoadFromData(data) + if err != nil { + return nil, fmt.Errorf("error parsing %s as Swagger YAML: %s", + path, err) + } + return OapiRequestValidator(swagger), nil +} + +// OapiRequestValidator is an gin middleware function which validates incoming HTTP requests +// to make sure that they conform to the given OAPI 3.0 specification. When +// OAPI validation fails on the request, we return an HTTP/400 with error message +func OapiRequestValidator(swagger *openapi3.T) gin.HandlerFunc { + return OapiRequestValidatorWithOptions(swagger, nil) +} + +// ErrorHandler is called when there is an error in validation +type ErrorHandler func(c *gin.Context, message string, statusCode int) + +// MultiErrorHandler is called when oapi returns a MultiError type +type MultiErrorHandler func(openapi3.MultiError) error + +// Options to customize request validation. These are passed through to +// openapi3filter. +type Options struct { + ErrorHandler ErrorHandler + Options openapi3filter.Options + ParamDecoder openapi3filter.ContentParameterDecoder + UserData interface{} + MultiErrorHandler MultiErrorHandler +} + +// OapiRequestValidatorWithOptions creates a validator from a swagger object, with validation options +func OapiRequestValidatorWithOptions(swagger *openapi3.T, options *Options) gin.HandlerFunc { + router, err := gorillamux.NewRouter(swagger) + if err != nil { + panic(err) + } + return func(c *gin.Context) { + err := ValidateRequestFromContext(c, router, options) + if err != nil { + if options != nil && options.ErrorHandler != nil { + options.ErrorHandler(c, err.Error(), http.StatusBadRequest) + // in case the handler didn't internally call Abort, stop the chain + c.Abort() + } else { + // note: i am not sure if this is the best way to handle this + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + } + } + c.Next() + } +} + +// ValidateRequestFromContext is called from the middleware above and actually does the work +// of validating a request. +func ValidateRequestFromContext(c *gin.Context, router routers.Router, options *Options) error { + req := c.Request + route, pathParams, err := router.FindRoute(req) + + // We failed to find a matching route for the request. + if err != nil { + switch e := err.(type) { + case *routers.RouteError: + // We've got a bad request, the path requested doesn't match + // either server, or path, or something. + return errors.New(e.Reason) + default: + // This should never happen today, but if our upstream code changes, + // we don't want to crash the server, so handle the unexpected error. 
+ return fmt.Errorf("error validating route: %s", err.Error()) + } + } + + validationInput := &openapi3filter.RequestValidationInput{ + Request: req, + PathParams: pathParams, + Route: route, + } + + // Pass the gin context into the request validator, so that any callbacks + // which it invokes make it available. + requestContext := context.WithValue(context.Background(), GinContextKey, c) //nolint:staticcheck + + if options != nil { + validationInput.Options = &options.Options + validationInput.ParamDecoder = options.ParamDecoder + requestContext = context.WithValue(requestContext, UserDataKey, options.UserData) //nolint:staticcheck + } + + err = openapi3filter.ValidateRequest(requestContext, validationInput) + if err != nil { + me := openapi3.MultiError{} + if errors.As(err, &me) { + errFunc := getMultiErrorHandlerFromOptions(options) + return errFunc(me) + } + + switch e := err.(type) { + case *openapi3filter.RequestError: + // We've got a bad request + // Split up the verbose error by lines and return the first one + // openapi errors seem to be multi-line with a decent message on the first + errorLines := strings.Split(e.Error(), "\n") + return fmt.Errorf("error in openapi3filter.RequestError: %s", errorLines[0]) + case *openapi3filter.SecurityRequirementsError: + return fmt.Errorf("error in openapi3filter.SecurityRequirementsError: %s", e.Error()) + default: + // This should never happen today, but if our upstream code changes, + // we don't want to crash the server, so handle the unexpected error. + return fmt.Errorf("error validating request: %s", err) + } + } + return nil +} + +// GetGinContext gets the echo context from within requests. It returns +// nil if not found or wrong type. +func GetGinContext(c context.Context) *gin.Context { + iface := c.Value(GinContextKey) + if iface == nil { + return nil + } + ginCtx, ok := iface.(*gin.Context) + if !ok { + return nil + } + return ginCtx +} + +func GetUserData(c context.Context) interface{} { + return c.Value(UserDataKey) +} + +// attempt to get the MultiErrorHandler from the options. If it is not set, +// return a default handler +func getMultiErrorHandlerFromOptions(options *Options) MultiErrorHandler { + if options == nil { + return defaultMultiErrorHandler + } + + if options.MultiErrorHandler == nil { + return defaultMultiErrorHandler + } + + return options.MultiErrorHandler +} + +// defaultMultiErrorHandler returns a StatusBadRequest (400) and a list +// of all of the errors. This method is called if there are no other +// methods defined on the options. 
+func defaultMultiErrorHandler(me openapi3.MultiError) error { + return fmt.Errorf("multiple errors encountered: %s", me) +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/test_spec.yaml b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/test_spec.yaml new file mode 100644 index 00000000..6e0a2415 --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/gin-middleware/test_spec.yaml @@ -0,0 +1,103 @@ +openapi: "3.0.0" +info: + version: 1.0.0 + title: TestServer +servers: + - url: http://deepmap.ai/ +paths: + /resource: + get: + operationId: getResource + parameters: + - name: id + in: query + schema: + type: integer + minimum: 10 + maximum: 100 + responses: + '200': + description: success + content: + application/json: + schema: + properties: + name: + type: string + id: + type: integer + post: + operationId: createResource + responses: + '204': + description: No content + requestBody: + required: true + content: + application/json: + schema: + properties: + name: + type: string + /protected_resource: + get: + operationId: getProtectedResource + security: + - BearerAuth: + - someScope + responses: + '204': + description: no content + /protected_resource2: + get: + operationId: getProtectedResource + security: + - BearerAuth: + - otherScope + responses: + '204': + description: no content + /protected_resource_401: + get: + operationId: getProtectedResource + security: + - BearerAuth: + - unauthorized + responses: + '401': + description: no content + /multiparamresource: + get: + operationId: getResource + parameters: + - name: id + in: query + required: true + schema: + type: integer + minimum: 10 + maximum: 100 + - name: id2 + required: true + in: query + schema: + type: integer + minimum: 10 + maximum: 100 + responses: + '200': + description: success + content: + application/json: + schema: + properties: + name: + type: string + id: + type: integer +components: + securitySchemes: + BearerAuth: + type: http + scheme: bearer + bearerFormat: JWT diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go new file mode 100644 index 00000000..1551250d --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go @@ -0,0 +1,24 @@ +// Copyright 2021 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package runtime + +// Binder is the interface implemented by types that can be bound to a query string or a parameter string +// The input can be assumed to be a valid string. If you define a Bind method you are responsible for all +// data being completely bound to the type. +// +// By convention, to approximate the behavior of Bind functions themselves, +// Binder implements Bind("") as a no-op. 
+type Binder interface { + Bind(src string) error +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindform.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindform.go new file mode 100644 index 00000000..4f4fb76c --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindform.go @@ -0,0 +1,309 @@ +package runtime + +import ( + "encoding/json" + "errors" + "fmt" + "mime/multipart" + "net/url" + "reflect" + "strconv" + "strings" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +const tagName = "json" +const jsonContentType = "application/json" + +type RequestBodyEncoding struct { + ContentType string + Style string + Explode *bool +} + +func BindMultipart(ptr interface{}, reader multipart.Reader) error { + const defaultMemory = 32 << 20 + form, err := reader.ReadForm(defaultMemory) + if err != nil { + return err + } + return BindForm(ptr, form.Value, form.File, nil) +} + +func BindForm(ptr interface{}, form map[string][]string, files map[string][]*multipart.FileHeader, encodings map[string]RequestBodyEncoding) error { + ptrVal := reflect.Indirect(reflect.ValueOf(ptr)) + if ptrVal.Kind() != reflect.Struct { + return errors.New("form data body should be a struct") + } + tValue := ptrVal.Type() + + for i := 0; i < tValue.NumField(); i++ { + field := ptrVal.Field(i) + tag := tValue.Field(i).Tag.Get(tagName) + if !field.CanInterface() || tag == "-" { + continue + } + tag = strings.Split(tag, ",")[0] // extract the name of the tag + if encoding, ok := encodings[tag]; ok { + // custom encoding + values := form[tag] + if len(values) == 0 { + continue + } + value := values[0] + if encoding.ContentType != "" { + if strings.HasPrefix(encoding.ContentType, jsonContentType) { + if err := json.Unmarshal([]byte(value), ptr); err != nil { + return err + } + } + return errors.New("unsupported encoding, only application/json is supported") + } else { + var explode bool + if encoding.Explode != nil { + explode = *encoding.Explode + } + if err := BindStyledParameterWithLocation(encoding.Style, explode, tag, ParamLocationUndefined, value, field.Addr().Interface()); err != nil { + return err + } + } + } else { + // regular form data + if _, err := bindFormImpl(field, form, files, tag); err != nil { + return err + } + } + } + + return nil +} + +func MarshalForm(ptr interface{}, encodings map[string]RequestBodyEncoding) (url.Values, error) { + ptrVal := reflect.Indirect(reflect.ValueOf(ptr)) + if ptrVal.Kind() != reflect.Struct { + return nil, errors.New("form data body should be a struct") + } + tValue := ptrVal.Type() + result := make(url.Values) + for i := 0; i < tValue.NumField(); i++ { + field := ptrVal.Field(i) + tag := tValue.Field(i).Tag.Get(tagName) + if !field.CanInterface() || tag == "-" { + continue + } + omitEmpty := strings.HasSuffix(tag, ",omitempty") + if omitEmpty && field.IsZero() { + continue + } + tag = strings.Split(tag, ",")[0] // extract the name of the tag + if encoding, ok := encodings[tag]; ok && encoding.ContentType != "" { + if strings.HasPrefix(encoding.ContentType, jsonContentType) { + if data, err := json.Marshal(field); err != nil { //nolint:staticcheck + return nil, err + } else { + result[tag] = append(result[tag], string(data)) + } + } + return nil, errors.New("unsupported encoding, only application/json is supported") + } else { + marshalFormImpl(field, result, tag) + } + } + return result, nil +} + +func bindFormImpl(v reflect.Value, form map[string][]string, 
files map[string][]*multipart.FileHeader, name string) (bool, error) { + var hasData bool + switch v.Kind() { + case reflect.Interface: + return bindFormImpl(v.Elem(), form, files, name) + case reflect.Ptr: + ptrData := v.Elem() + if !ptrData.IsValid() { + ptrData = reflect.New(v.Type().Elem()) + } + ptrHasData, err := bindFormImpl(ptrData, form, files, name) + if err == nil && ptrHasData && !v.Elem().IsValid() { + v.Set(ptrData) + } + return ptrHasData, err + case reflect.Slice: + if files := append(files[name], files[name+"[]"]...); len(files) != 0 { + if _, ok := v.Interface().([]types.File); ok { + result := make([]types.File, len(files)) + for i, file := range files { + result[i].InitFromMultipart(file) + } + v.Set(reflect.ValueOf(result)) + hasData = true + } + } + indexedElementsCount := indexedElementsCount(form, files, name) + items := append(form[name], form[name+"[]"]...) + if indexedElementsCount+len(items) != 0 { + result := reflect.MakeSlice(v.Type(), indexedElementsCount+len(items), indexedElementsCount+len(items)) + for i := 0; i < indexedElementsCount; i++ { + if _, err := bindFormImpl(result.Index(i), form, files, fmt.Sprintf("%s[%v]", name, i)); err != nil { + return false, err + } + } + for i, item := range items { + if err := BindStringToObject(item, result.Index(indexedElementsCount+i).Addr().Interface()); err != nil { + return false, err + } + } + v.Set(result) + hasData = true + } + case reflect.Struct: + if files := files[name]; len(files) != 0 { + if file, ok := v.Interface().(types.File); ok { + file.InitFromMultipart(files[0]) + v.Set(reflect.ValueOf(file)) + return true, nil + } + } + for i := 0; i < v.NumField(); i++ { + field := v.Type().Field(i) + tag := field.Tag.Get(tagName) + if field.Name == "AdditionalProperties" && field.Type.Kind() == reflect.Map && tag == "-" { + additionalPropertiesHasData, err := bindAdditionalProperties(v.Field(i), v, form, files, name) + if err != nil { + return false, err + } + hasData = hasData || additionalPropertiesHasData + } + if !v.Field(i).CanInterface() || tag == "-" { + continue + } + tag = strings.Split(tag, ",")[0] // extract the name of the tag + fieldHasData, err := bindFormImpl(v.Field(i), form, files, fmt.Sprintf("%s[%s]", name, tag)) + if err != nil { + return false, err + } + hasData = hasData || fieldHasData + } + return hasData, nil + default: + value := form[name] + if len(value) != 0 { + return true, BindStringToObject(value[0], v.Addr().Interface()) + } + } + return hasData, nil +} + +func indexedElementsCount(form map[string][]string, files map[string][]*multipart.FileHeader, name string) int { + name += "[" + maxIndex := -1 + for k := range form { + if strings.HasPrefix(k, name) { + str := strings.TrimPrefix(k, name) + str = str[:strings.Index(str, "]")] + if idx, err := strconv.Atoi(str); err == nil { + if idx > maxIndex { + maxIndex = idx + } + } + } + } + for k := range files { + if strings.HasPrefix(k, name) { + str := strings.TrimPrefix(k, name) + str = str[:strings.Index(str, "]")] + if idx, err := strconv.Atoi(str); err == nil { + if idx > maxIndex { + maxIndex = idx + } + } + } + } + return maxIndex + 1 +} + +func bindAdditionalProperties(additionalProperties reflect.Value, parentStruct reflect.Value, form map[string][]string, files map[string][]*multipart.FileHeader, name string) (bool, error) { + hasData := false + valueType := additionalProperties.Type().Elem() + + // store all fixed properties in a set + fieldsSet := make(map[string]struct{}) + for i := 0; i < parentStruct.NumField(); i++ { + 
tag := parentStruct.Type().Field(i).Tag.Get(tagName) + if !parentStruct.Field(i).CanInterface() || tag == "-" { + continue + } + tag = strings.Split(tag, ",")[0] + fieldsSet[tag] = struct{}{} + } + + result := reflect.MakeMap(additionalProperties.Type()) + for k := range form { + if strings.HasPrefix(k, name+"[") { + key := strings.TrimPrefix(k, name+"[") + key = key[:strings.Index(key, "]")] + if _, ok := fieldsSet[key]; ok { + continue + } + value := reflect.New(valueType) + ptrHasData, err := bindFormImpl(value, form, files, fmt.Sprintf("%s[%s]", name, key)) + if err != nil { + return false, err + } + result.SetMapIndex(reflect.ValueOf(key), value.Elem()) + hasData = hasData || ptrHasData + } + } + for k := range files { + if strings.HasPrefix(k, name+"[") { + key := strings.TrimPrefix(k, name+"[") + key = key[:strings.Index(key, "]")] + if _, ok := fieldsSet[key]; ok { + continue + } + value := reflect.New(valueType) + result.SetMapIndex(reflect.ValueOf(key), value) + ptrHasData, err := bindFormImpl(value, form, files, fmt.Sprintf("%s[%s]", name, key)) + if err != nil { + return false, err + } + result.SetMapIndex(reflect.ValueOf(key), value.Elem()) + hasData = hasData || ptrHasData + } + } + if hasData { + additionalProperties.Set(result) + } + return hasData, nil +} + +func marshalFormImpl(v reflect.Value, result url.Values, name string) { + switch v.Kind() { + case reflect.Interface, reflect.Ptr: + marshalFormImpl(v.Elem(), result, name) + case reflect.Slice: + for i := 0; i < v.Len(); i++ { + elem := v.Index(i) + marshalFormImpl(elem, result, fmt.Sprintf("%s[%v]", name, i)) + } + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + field := v.Type().Field(i) + tag := field.Tag.Get(tagName) + if field.Name == "AdditionalProperties" && tag == "-" { + iter := v.MapRange() + for iter.Next() { + marshalFormImpl(iter.Value(), result, fmt.Sprintf("%s[%s]", name, iter.Key().String())) + } + continue + } + if !v.Field(i).CanInterface() || tag == "-" { + continue + } + tag = strings.Split(tag, ",")[0] // extract the name of the tag + marshalFormImpl(v.Field(i), result, fmt.Sprintf("%s[%s]", name, tag)) + } + default: + result[name] = append(result[name], fmt.Sprint(v.Interface())) + } +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go new file mode 100644 index 00000000..4fc1b30d --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go @@ -0,0 +1,526 @@ +// Copyright 2019 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
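The runtime package added above contains the form-binding helpers (bindform.go) that generated request bindings rely on; bindparam.go, which follows, handles styled path and query parameters. A rough, hypothetical illustration of BindForm — the struct, json tags, and values are invented for the example, and the import path mirrors the vendored directory:

```go
package main

import (
	"fmt"
	"log"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

// Illustrative target type; the fields and json tags are made up.
type starterProject struct {
	Name string `json:"name"`
	Arch string `json:"arch"`
}

func main() {
	// Values as a handler might receive them from a form-encoded request body.
	form := map[string][]string{
		"name": {"java-springboot"},
		"arch": {"amd64"},
	}

	var dst starterProject
	if err := runtime.BindForm(&dst, form, nil, nil); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", dst) // {Name:java-springboot Arch:amd64}
}
```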
+package runtime + +import ( + "encoding" + "encoding/json" + "errors" + "fmt" + "net/url" + "reflect" + "strings" + "time" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +// BindStyledParameter binds a parameter as described in the Path Parameters +// section here to a Go object: +// https://swagger.io/docs/specification/serialization/ +// It is a backward compatible function to clients generated with codegen +// up to version v1.5.5. v1.5.6+ calls the function below. +func BindStyledParameter(style string, explode bool, paramName string, + value string, dest interface{}) error { + return BindStyledParameterWithLocation(style, explode, paramName, ParamLocationUndefined, value, dest) +} + +// BindStyledParameterWithLocation binds a parameter as described in the Path Parameters +// section here to a Go object: +// https://swagger.io/docs/specification/serialization/ +func BindStyledParameterWithLocation(style string, explode bool, paramName string, + paramLocation ParamLocation, value string, dest interface{}) error { + + if value == "" { + return fmt.Errorf("parameter '%s' is empty, can't bind its value", paramName) + } + + // Based on the location of the parameter, we need to unescape it properly. + var err error + switch paramLocation { + case ParamLocationQuery, ParamLocationUndefined: + // We unescape undefined parameter locations here for older generated code, + // since prior to this refactoring, they always query unescaped. + value, err = url.QueryUnescape(value) + if err != nil { + return fmt.Errorf("error unescaping query parameter '%s': %v", paramName, err) + } + case ParamLocationPath: + value, err = url.PathUnescape(value) + if err != nil { + return fmt.Errorf("error unescaping path parameter '%s': %v", paramName, err) + } + default: + // Headers and cookies aren't escaped. + } + + // If the destination implements encoding.TextUnmarshaler we use it for binding + if tu, ok := dest.(encoding.TextUnmarshaler); ok { + if err := tu.UnmarshalText([]byte(value)); err != nil { + return fmt.Errorf("error unmarshalling '%s' text as %T: %s", value, dest, err) + } + + return nil + } + + // Everything comes in by pointer, dereference it + v := reflect.Indirect(reflect.ValueOf(dest)) + + // This is the basic type of the destination object. + t := v.Type() + + if t.Kind() == reflect.Struct { + // We've got a destination object, we'll create a JSON representation + // of the input value, and let the json library deal with the unmarshalling + parts, err := splitStyledParameter(style, explode, true, paramName, value) + if err != nil { + return err + } + + return bindSplitPartsToDestinationStruct(paramName, parts, explode, dest) + } + + if t.Kind() == reflect.Slice { + // Chop up the parameter into parts based on its style + parts, err := splitStyledParameter(style, explode, false, paramName, value) + if err != nil { + return fmt.Errorf("error splitting input '%s' into parts: %s", value, err) + } + + return bindSplitPartsToDestinationArray(parts, dest) + } + + // Try to bind the remaining types as a base type. + return BindStringToObject(value, dest) +} + +// This is a complex set of operations, but each given parameter style can be +// packed together in multiple ways, using different styles of separators, and +// different packing strategies based on the explode flag. This function takes +// as input any parameter format, and unpacks it to a simple list of strings +// or key-values which we can then treat generically. 
+// Why, oh why, great Swagger gods, did you have to make this so complicated? +func splitStyledParameter(style string, explode bool, object bool, paramName string, value string) ([]string, error) { + switch style { + case "simple": + // In the simple case, we always split on comma + parts := strings.Split(value, ",") + return parts, nil + case "label": + // In the label case, it's more tricky. In the no explode case, we have + // /users/.3,4,5 for arrays + // /users/.role,admin,firstName,Alex for objects + // in the explode case, we have: + // /users/.3.4.5 + // /users/.role=admin.firstName=Alex + if explode { + // In the exploded case, split everything on periods. + parts := strings.Split(value, ".") + // The first part should be an empty string because we have a + // leading period. + if parts[0] != "" { + return nil, fmt.Errorf("invalid format for label parameter '%s', should start with '.'", paramName) + } + return parts[1:], nil + + } else { + // In the unexploded case, we strip off the leading period. + if value[0] != '.' { + return nil, fmt.Errorf("invalid format for label parameter '%s', should start with '.'", paramName) + } + // The rest is comma separated. + return strings.Split(value[1:], ","), nil + } + + case "matrix": + if explode { + // In the exploded case, we break everything up on semicolon + parts := strings.Split(value, ";") + // The first part should always be empty string, since we started + // with ;something + if parts[0] != "" { + return nil, fmt.Errorf("invalid format for matrix parameter '%s', should start with ';'", paramName) + } + parts = parts[1:] + // Now, if we have an object, we just have a list of x=y statements. + // for a non-object, like an array, we have id=x, id=y. id=z, etc, + // so we need to strip the prefix from each of them. + if !object { + prefix := paramName + "=" + for i := range parts { + parts[i] = strings.TrimPrefix(parts[i], prefix) + } + } + return parts, nil + } else { + // In the unexploded case, parameters will start with ;paramName= + prefix := ";" + paramName + "=" + if !strings.HasPrefix(value, prefix) { + return nil, fmt.Errorf("expected parameter '%s' to start with %s", paramName, prefix) + } + str := strings.TrimPrefix(value, prefix) + return strings.Split(str, ","), nil + } + case "form": + var parts []string + if explode { + parts = strings.Split(value, "&") + if !object { + prefix := paramName + "=" + for i := range parts { + parts[i] = strings.TrimPrefix(parts[i], prefix) + } + } + return parts, nil + } else { + parts = strings.Split(value, ",") + prefix := paramName + "=" + for i := range parts { + parts[i] = strings.TrimPrefix(parts[i], prefix) + } + } + return parts, nil + } + + return nil, fmt.Errorf("unhandled parameter style: %s", style) +} + +// Given a set of values as a slice, create a slice to hold them all, and +// assign to each one by one. +func bindSplitPartsToDestinationArray(parts []string, dest interface{}) error { + // Everything comes in by pointer, dereference it + v := reflect.Indirect(reflect.ValueOf(dest)) + + // This is the basic type of the destination object. + t := v.Type() + + // We've got a destination array, bind each object one by one. + // This generates a slice of the correct element type and length to + // hold all the parts. 
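The label and matrix branches above correspond to raw path segments like the following; this sketch binds both into integer slices (parameter names invented).

package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	// label style, exploded: /users/.3.4.5
	var a []int
	_ = runtime.BindStyledParameterWithLocation(
		"label", true, "id", runtime.ParamLocationPath, ".3.4.5", &a)

	// matrix style, not exploded: /users/;id=3,4,5
	var b []int
	_ = runtime.BindStyledParameterWithLocation(
		"matrix", false, "id", runtime.ParamLocationPath, ";id=3,4,5", &b)

	fmt.Println(a, b) // [3 4 5] [3 4 5]
}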
+ newArray := reflect.MakeSlice(t, len(parts), len(parts)) + for i, p := range parts { + err := BindStringToObject(p, newArray.Index(i).Addr().Interface()) + if err != nil { + return fmt.Errorf("error setting array element: %s", err) + } + } + v.Set(newArray) + return nil +} + +// Given a set of chopped up parameter parts, bind them to a destination +// struct. The exploded parameter controls whether we send key value pairs +// in the exploded case, or a sequence of values which are interpreted as +// tuples. +// Given the struct Id { firstName string, role string }, as in the canonical +// swagger examples, in the exploded case, we would pass +// ["firstName=Alex", "role=admin"], where in the non-exploded case, we would +// pass "firstName", "Alex", "role", "admin"] +// +// We punt the hard work of binding these values to the object to the json +// library. We'll turn those arrays into JSON strings, and unmarshal +// into the struct. +func bindSplitPartsToDestinationStruct(paramName string, parts []string, explode bool, dest interface{}) error { + // We've got a destination object, we'll create a JSON representation + // of the input value, and let the json library deal with the unmarshalling + var fields []string + if explode { + fields = make([]string, len(parts)) + for i, property := range parts { + propertyParts := strings.Split(property, "=") + if len(propertyParts) != 2 { + return fmt.Errorf("parameter '%s' has invalid exploded format", paramName) + } + fields[i] = "\"" + propertyParts[0] + "\":\"" + propertyParts[1] + "\"" + } + } else { + if len(parts)%2 != 0 { + return fmt.Errorf("parameter '%s' has invalid format, property/values need to be pairs", paramName) + } + fields = make([]string, len(parts)/2) + for i := 0; i < len(parts); i += 2 { + key := parts[i] + value := parts[i+1] + fields[i/2] = "\"" + key + "\":\"" + value + "\"" + } + } + jsonParam := "{" + strings.Join(fields, ",") + "}" + err := json.Unmarshal([]byte(jsonParam), dest) + if err != nil { + return fmt.Errorf("error binding parameter %s fields: %s", paramName, err) + } + return nil +} + +// BindQueryParameter works much like BindStyledParameter, however it takes a query argument +// input array from the url package, since query arguments come through a +// different path than the styled arguments. They're also exceptionally fussy. +// For example, consider the exploded and unexploded form parameter examples: +// (exploded) /users?role=admin&firstName=Alex +// (unexploded) /users?id=role,admin,firstName,Alex +// +// In the first case, we can pull the "id" parameter off the context, +// and unmarshal via json as an intermediate. Easy. In the second case, we +// don't have the id QueryParam present, but must find "role", and "firstName". +// what if there is another parameter similar to "ID" named "role"? We can't +// tell them apart. This code tries to fail, but the moral of the story is that +// you shouldn't pass objects via form styled query arguments, just use +// the Content parameter form. +func BindQueryParameter(style string, explode bool, required bool, paramName string, + queryParams url.Values, dest interface{}) error { + + // dv = destination value. + dv := reflect.Indirect(reflect.ValueOf(dest)) + + // intermediate value form which is either dv or dv dereferenced. + v := dv + + // inner code will bind the string's value to this interface. + var output interface{} + + if required { + // If the parameter is required, then the generated code will pass us + // a pointer to it: &int, &object, and so forth. 
We can directly set + // them. + output = dest + } else { + // For optional parameters, we have an extra indirect. An optional + // parameter of type "int" will be *int on the struct. We pass that + // in by pointer, and have **int. + + // If the destination, is a nil pointer, we need to allocate it. + if v.IsNil() { + t := v.Type() + newValue := reflect.New(t.Elem()) + // for now, hang onto the output buffer separately from destination, + // as we don't want to write anything to destination until we can + // unmarshal successfully, and check whether a field is required. + output = newValue.Interface() + } else { + // If the destination isn't nil, just use that. + output = v.Interface() + } + + // Get rid of that extra indirect as compared to the required case, + // so the code below doesn't have to care. + v = reflect.Indirect(reflect.ValueOf(output)) + } + + // This is the basic type of the destination object. + t := v.Type() + k := t.Kind() + + switch style { + case "form": + var parts []string + if explode { + // ok, the explode case in query arguments is very, very annoying, + // because an exploded object, such as /users?role=admin&firstName=Alex + // isn't actually present in the parameter array. We have to do + // different things based on destination type. + values, found := queryParams[paramName] + var err error + + switch k { + case reflect.Slice: + // In the slice case, we simply use the arguments provided by + // http library. + + if !found { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + // If an optional parameter is not found, we do nothing, + return nil + } + } + err = bindSplitPartsToDestinationArray(values, output) + case reflect.Struct: + // This case is really annoying, and error prone, but the + // form style object binding doesn't tell us which arguments + // in the query string correspond to the object's fields. We'll + // try to bind field by field. + var fieldsPresent bool + fieldsPresent, err = bindParamsToExplodedObject(paramName, queryParams, output) + // If no fields were set, and there is no error, we will not fall + // through to assign the destination. + if !fieldsPresent { + return nil + } + default: + // Primitive object case. We expect to have 1 value to + // unmarshal. + if len(values) == 0 { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + return nil + } + } + if len(values) != 1 { + return fmt.Errorf("multiple values for single value parameter '%s'", paramName) + } + + if !found { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + // If an optional parameter is not found, we do nothing, + return nil + } + } + err = BindStringToObject(values[0], output) + } + if err != nil { + return err + } + // If the parameter is required, and we've successfully unmarshaled + // it, this assigns the new object to the pointer pointer. 
+ if !required { + dv.Set(reflect.ValueOf(output)) + } + return nil + } else { + values, found := queryParams[paramName] + if !found { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + return nil + } + } + if len(values) != 1 { + return fmt.Errorf("parameter '%s' is not exploded, but is specified multiple times", paramName) + } + parts = strings.Split(values[0], ",") + } + var err error + switch k { + case reflect.Slice: + err = bindSplitPartsToDestinationArray(parts, output) + case reflect.Struct: + err = bindSplitPartsToDestinationStruct(paramName, parts, explode, output) + default: + if len(parts) == 0 { + if required { + return fmt.Errorf("query parameter '%s' is required", paramName) + } else { + return nil + } + } + if len(parts) != 1 { + return fmt.Errorf("multiple values for single value parameter '%s'", paramName) + } + err = BindStringToObject(parts[0], output) + } + if err != nil { + return err + } + if !required { + dv.Set(reflect.ValueOf(output)) + } + return nil + case "deepObject": + if !explode { + return errors.New("deepObjects must be exploded") + } + return UnmarshalDeepObject(dest, paramName, queryParams) + case "spaceDelimited", "pipeDelimited": + return fmt.Errorf("query arguments of style '%s' aren't yet supported", style) + default: + return fmt.Errorf("style '%s' on parameter '%s' is invalid", style, paramName) + + } +} + +// bindParamsToExplodedObject reflects the destination structure, and pulls the value for +// each settable field from the given parameters map. This is to deal with the +// exploded form styled object which may occupy any number of parameter names. +// We don't try to be smart here, if the field exists as a query argument, +// set its value. This function returns a boolean, telling us whether there was +// anything to bind. There will be nothing to bind if a parameter isn't found by name, +// or none of an exploded object's fields are present. +func bindParamsToExplodedObject(paramName string, values url.Values, dest interface{}) (bool, error) { + // Dereference pointers to their destination values + binder, v, t := indirect(dest) + if binder != nil { + _, found := values[paramName] + if !found { + return false, nil + } + return true, BindStringToObject(values.Get(paramName), dest) + } + if t.Kind() != reflect.Struct { + return false, fmt.Errorf("unmarshalling query arg '%s' into wrong type", paramName) + } + + fieldsPresent := false + for i := 0; i < t.NumField(); i++ { + fieldT := t.Field(i) + + // Skip unsettable fields, such as internal ones. + if !v.Field(i).CanSet() { + continue + } + + // Find the json annotation on the field, and use the json specified + // name if available, otherwise, just the field name. + tag := fieldT.Tag.Get("json") + fieldName := fieldT.Name + if tag != "" { + tagParts := strings.Split(tag, ",") + name := tagParts[0] + if name != "" { + fieldName = name + } + } + + // At this point, we look up field name in the parameter list. 
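A sketch of how a generated handler would drive this for query parameters. A required parameter arrives as &T, while an optional one is the address of a pointer field, which is why the binder only allocates and assigns it when a value is present. Names are invented.

package main

import (
	"fmt"
	"net/url"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	query, _ := url.ParseQuery("tags=go&tags=rest&limit=10")

	// Required, exploded form slice: each repetition of "tags" is one element.
	var tags []string
	_ = runtime.BindQueryParameter("form", true, true, "tags", query, &tags)

	// Optional primitive: the generated struct field would be *int, so the
	// binder receives a **int and only assigns it when the value is found.
	var limit *int
	_ = runtime.BindQueryParameter("form", true, false, "limit", query, &limit)

	fmt.Println(tags, *limit) // [go rest] 10
}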
+ fieldVal, found := values[fieldName] + if found { + if len(fieldVal) != 1 { + return false, fmt.Errorf("field '%s' specified multiple times for param '%s'", fieldName, paramName) + } + err := BindStringToObject(fieldVal[0], v.Field(i).Addr().Interface()) + if err != nil { + return false, fmt.Errorf("could not bind query arg '%s' to request object: %s'", paramName, err) + } + fieldsPresent = true + } + } + return fieldsPresent, nil +} + +// indirect +func indirect(dest interface{}) (interface{}, reflect.Value, reflect.Type) { + v := reflect.ValueOf(dest) + if v.Type().NumMethod() > 0 && v.CanInterface() { + if u, ok := v.Interface().(Binder); ok { + return u, reflect.Value{}, nil + } + } + v = reflect.Indirect(v) + t := v.Type() + // special handling for custom types which might look like an object. We + // don't want to use object binding on them, but rather treat them as + // primitive types. time.Time{} is a unique case since we can't add a Binder + // to it without changing the underlying generated code. + if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { + return dest, reflect.Value{}, nil + } + if t.ConvertibleTo(reflect.TypeOf(types.Date{})) { + return dest, reflect.Value{}, nil + } + return nil, v, t +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go new file mode 100644 index 00000000..72f74dd1 --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go @@ -0,0 +1,174 @@ +// Copyright 2019 DeepMap, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package runtime + +import ( + "encoding" + "errors" + "fmt" + "reflect" + "strconv" + "time" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +// BindStringToObject takes a string, and attempts to assign it to the destination +// interface via whatever type conversion is necessary. We have to do this +// via reflection instead of a much simpler type switch so that we can handle +// type aliases. This function was the easy way out, the better way, since we +// know the destination type each place that we use this, is to generate code +// to read each specific type. +func BindStringToObject(src string, dst interface{}) error { + var err error + + v := reflect.ValueOf(dst) + t := reflect.TypeOf(dst) + + // We need to dereference pointers + if t.Kind() == reflect.Ptr { + v = reflect.Indirect(v) + t = v.Type() + } + + // For some optioinal args + if t.Kind() == reflect.Ptr { + if v.IsNil() { + v.Set(reflect.New(t.Elem())) + } + + v = reflect.Indirect(v) + t = v.Type() + } + + // The resulting type must be settable. reflect will catch issues like + // passing the destination by value. 
+ if !v.CanSet() { + return errors.New("destination is not settable") + } + + switch t.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + var val int64 + val, err = strconv.ParseInt(src, 10, 64) + if err == nil { + if v.OverflowInt(val) { + err = fmt.Errorf("value '%s' overflows destination of type: %s", src, t.Kind()) + } + if err == nil { + v.SetInt(val) + } + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + var val uint64 + val, err = strconv.ParseUint(src, 10, 64) + if err == nil { + if v.OverflowUint(val) { + err = fmt.Errorf("value '%s' overflows destination of type: %s", src, t.Kind()) + } + v.SetUint(val) + } + case reflect.String: + v.SetString(src) + err = nil + case reflect.Float64, reflect.Float32: + var val float64 + val, err = strconv.ParseFloat(src, 64) + if err == nil { + if v.OverflowFloat(val) { + err = fmt.Errorf("value '%s' overflows destination of type: %s", src, t.Kind()) + } + v.SetFloat(val) + } + case reflect.Bool: + var val bool + val, err = strconv.ParseBool(src) + if err == nil { + v.SetBool(val) + } + case reflect.Array: + if tu, ok := dst.(encoding.TextUnmarshaler); ok { + if err := tu.UnmarshalText([]byte(src)); err != nil { + return fmt.Errorf("error unmarshalling '%s' text as %T: %s", src, dst, err) + } + + return nil + } + fallthrough + case reflect.Struct: + // if this is not of type Time or of type Date look to see if this is of type Binder. + if dstType, ok := dst.(Binder); ok { + return dstType.Bind(src) + } + + if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { + // Don't fail on empty string. + if src == "" { + return nil + } + // Time is a special case of a struct that we handle + parsedTime, err := time.Parse(time.RFC3339Nano, src) + if err != nil { + parsedTime, err = time.Parse(types.DateFormat, src) + if err != nil { + return fmt.Errorf("error parsing '%s' as RFC3339 or 2006-01-02 time: %s", src, err) + } + } + // So, assigning this gets a little fun. We have a value to the + // dereference destination. We can't do a conversion to + // time.Time because the result isn't assignable, so we need to + // convert pointers. + if t != reflect.TypeOf(time.Time{}) { + vPtr := v.Addr() + vtPtr := vPtr.Convert(reflect.TypeOf(&time.Time{})) + v = reflect.Indirect(vtPtr) + } + v.Set(reflect.ValueOf(parsedTime)) + return nil + } + + if t.ConvertibleTo(reflect.TypeOf(types.Date{})) { + // Don't fail on empty string. + if src == "" { + return nil + } + parsedTime, err := time.Parse(types.DateFormat, src) + if err != nil { + return fmt.Errorf("error parsing '%s' as date: %s", src, err) + } + parsedDate := types.Date{Time: parsedTime} + + // We have to do the same dance here to assign, just like with times + // above. + if t != reflect.TypeOf(types.Date{}) { + vPtr := v.Addr() + vtPtr := vPtr.Convert(reflect.TypeOf(&types.Date{})) + v = reflect.Indirect(vtPtr) + } + v.Set(reflect.ValueOf(parsedDate)) + return nil + } + + // We fall through to the error case below if we haven't handled the + // destination type above. + fallthrough + default: + // We've got a bunch of types unimplemented, don't fail silently. 
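A short sketch of the alias handling this function exists for; Limit is an invented named type whose underlying kind is still recognized, and time.Time values are parsed from RFC3339 as described above.

package main

import (
	"fmt"
	"time"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

// Limit is a named type; the reflection-based binder still sees its int64 kind.
type Limit int64

func main() {
	var l Limit
	_ = runtime.BindStringToObject("25", &l)

	var when time.Time
	_ = runtime.BindStringToObject("2023-07-10T17:54:07Z", &when)

	fmt.Println(l, when.Year()) // 25 2023
}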
+ err = fmt.Errorf("can not bind to destination of type: %s", t.Kind()) + } + if err != nil { + return fmt.Errorf("error binding string parameter: %s", err) + } + return nil +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go new file mode 100644 index 00000000..f18ace37 --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go @@ -0,0 +1,358 @@ +package runtime + +import ( + "encoding/json" + "errors" + "fmt" + "net/url" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +func marshalDeepObject(in interface{}, path []string) ([]string, error) { + var result []string + + switch t := in.(type) { + case []interface{}: + // For the array, we will use numerical subscripts of the form [x], + // in the same order as the array. + for i, iface := range t { + newPath := append(path, strconv.Itoa(i)) + fields, err := marshalDeepObject(iface, newPath) + if err != nil { + return nil, fmt.Errorf("error traversing array: %w", err) + } + result = append(result, fields...) + } + case map[string]interface{}: + // For a map, each key (field name) becomes a member of the path, and + // we recurse. First, sort the keys. + keys := make([]string, len(t)) + i := 0 + for k := range t { + keys[i] = k + i++ + } + sort.Strings(keys) + + // Now, for each key, we recursively marshal it. + for _, k := range keys { + newPath := append(path, k) + fields, err := marshalDeepObject(t[k], newPath) + if err != nil { + return nil, fmt.Errorf("error traversing map: %w", err) + } + result = append(result, fields...) + } + default: + // Now, for a concrete value, we will turn the path elements + // into a deepObject style set of subscripts. [a, b, c] turns into + // [a][b][c] + prefix := "[" + strings.Join(path, "][") + "]" + result = []string{ + prefix + fmt.Sprintf("=%v", t), + } + } + return result, nil +} + +func MarshalDeepObject(i interface{}, paramName string) (string, error) { + // We're going to marshal to JSON and unmarshal into an interface{}, + // which will use the json pkg to deal with all the field annotations. We + // can then walk the generic object structure to produce a deepObject. This + // isn't efficient and it would be more efficient to reflect on our own, + // but it's complicated, error-prone code. + buf, err := json.Marshal(i) + if err != nil { + return "", fmt.Errorf("failed to marshal input to JSON: %w", err) + } + var i2 interface{} + err = json.Unmarshal(buf, &i2) + if err != nil { + return "", fmt.Errorf("failed to unmarshal JSON: %w", err) + } + fields, err := marshalDeepObject(i2, nil) + if err != nil { + return "", fmt.Errorf("error traversing JSON structure: %w", err) + } + + // Prefix the param name to each subscripted field. 
+ for i := range fields { + fields[i] = paramName + fields[i] + } + return strings.Join(fields, "&"), nil +} + +type fieldOrValue struct { + fields map[string]fieldOrValue + value string +} + +func (f *fieldOrValue) appendPathValue(path []string, value string) { + fieldName := path[0] + if len(path) == 1 { + f.fields[fieldName] = fieldOrValue{value: value} + return + } + + pv, found := f.fields[fieldName] + if !found { + pv = fieldOrValue{ + fields: make(map[string]fieldOrValue), + } + f.fields[fieldName] = pv + } + pv.appendPathValue(path[1:], value) +} + +func makeFieldOrValue(paths [][]string, values []string) fieldOrValue { + + f := fieldOrValue{ + fields: make(map[string]fieldOrValue), + } + for i := range paths { + path := paths[i] + value := values[i] + f.appendPathValue(path, value) + } + return f +} + +func UnmarshalDeepObject(dst interface{}, paramName string, params url.Values) error { + // Params are all the query args, so we need those that look like + // "paramName["... + var fieldNames []string + var fieldValues []string + searchStr := paramName + "[" + for pName, pValues := range params { + if strings.HasPrefix(pName, searchStr) { + // trim the parameter name from the full name. + pName = pName[len(paramName):] + fieldNames = append(fieldNames, pName) + if len(pValues) != 1 { + return fmt.Errorf("%s has multiple values", pName) + } + fieldValues = append(fieldValues, pValues[0]) + } + } + + // Now, for each field, reconstruct its subscript path and value + paths := make([][]string, len(fieldNames)) + for i, path := range fieldNames { + path = strings.TrimLeft(path, "[") + path = strings.TrimRight(path, "]") + paths[i] = strings.Split(path, "][") + } + + fieldPaths := makeFieldOrValue(paths, fieldValues) + err := assignPathValues(dst, fieldPaths) + if err != nil { + return fmt.Errorf("error assigning value to destination: %w", err) + } + + return nil +} + +// This returns a field name, either using the variable name, or the json +// annotation if that exists. +func getFieldName(f reflect.StructField) string { + n := f.Name + tag, found := f.Tag.Lookup("json") + if found { + // If we have a json field, and the first part of it before the + // first comma is non-empty, that's our field name. + parts := strings.Split(tag, ",") + if parts[0] != "" { + n = parts[0] + } + } + return n +} + +// Create a map of field names that we'll see in the deepObject to reflect +// field indices on the given type. +func fieldIndicesByJsonTag(i interface{}) (map[string]int, error) { + t := reflect.TypeOf(i) + if t.Kind() != reflect.Struct { + return nil, errors.New("expected a struct as input") + } + + n := t.NumField() + fieldMap := make(map[string]int) + for i := 0; i < n; i++ { + field := t.Field(i) + fieldName := getFieldName(field) + fieldMap[fieldName] = i + } + return fieldMap, nil +} + +func assignPathValues(dst interface{}, pathValues fieldOrValue) error { + //t := reflect.TypeOf(dst) + v := reflect.ValueOf(dst) + + iv := reflect.Indirect(v) + it := iv.Type() + + switch it.Kind() { + case reflect.Slice: + sliceLength := len(pathValues.fields) + dstSlice := reflect.MakeSlice(it, sliceLength, sliceLength) + err := assignSlice(dstSlice, pathValues) + if err != nil { + return fmt.Errorf("error assigning slice: %w", err) + } + iv.Set(dstSlice) + return nil + case reflect.Struct: + // Some special types we care about are structs. Handle them + // here. They may be redefined, so we need to do some hoop + // jumping. 
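To make the deepObject wire format concrete, here is a sketch of a round trip through the two functions above; the Filter type and the filter parameter name are invented.

package main

import (
	"fmt"
	"net/url"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

type Filter struct {
	Name string   `json:"name"`
	Tags []string `json:"tags"`
}

func main() {
	out, _ := runtime.MarshalDeepObject(Filter{Name: "go", Tags: []string{"a", "b"}}, "filter")
	fmt.Println(out) // filter[name]=go&filter[tags][0]=a&filter[tags][1]=b

	params := url.Values{
		"filter[name]":    {"go"},
		"filter[tags][0]": {"a"},
		"filter[tags][1]": {"b"},
	}
	var f Filter
	_ = runtime.UnmarshalDeepObject(&f, "filter", params)
	fmt.Printf("%+v\n", f) // {Name:go Tags:[a b]}
}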
If the types are aliased, we need to type convert + // the pointer, then set the value of the dereference pointer. + + // We check to see if the object implements the Binder interface first. + if dst, isBinder := v.Interface().(Binder); isBinder { + return dst.Bind(pathValues.value) + } + // Then check the legacy types + if it.ConvertibleTo(reflect.TypeOf(types.Date{})) { + var date types.Date + var err error + date.Time, err = time.Parse(types.DateFormat, pathValues.value) + if err != nil { + return fmt.Errorf("invalid date format: %w", err) + } + dst := iv + if it != reflect.TypeOf(types.Date{}) { + // Types are aliased, convert the pointers. + ivPtr := iv.Addr() + aPtr := ivPtr.Convert(reflect.TypeOf(&types.Date{})) + dst = reflect.Indirect(aPtr) + } + dst.Set(reflect.ValueOf(date)) + } + if it.ConvertibleTo(reflect.TypeOf(time.Time{})) { + var tm time.Time + var err error + tm, err = time.Parse(time.RFC3339Nano, pathValues.value) + if err != nil { + // Fall back to parsing it as a date. + // TODO: why is this marked as an ineffassign? + tm, err = time.Parse(types.DateFormat, pathValues.value) //nolint:ineffassign,staticcheck + if err != nil { + return fmt.Errorf("error parsing tim as RFC3339 or 2006-01-02 time: %s", err) + } + return fmt.Errorf("invalid date format: %w", err) + } + dst := iv + if it != reflect.TypeOf(time.Time{}) { + // Types are aliased, convert the pointers. + ivPtr := iv.Addr() + aPtr := ivPtr.Convert(reflect.TypeOf(&time.Time{})) + dst = reflect.Indirect(aPtr) + } + dst.Set(reflect.ValueOf(tm)) + } + fieldMap, err := fieldIndicesByJsonTag(iv.Interface()) + if err != nil { + return fmt.Errorf("failed enumerating fields: %w", err) + } + for _, fieldName := range sortedFieldOrValueKeys(pathValues.fields) { + fieldValue := pathValues.fields[fieldName] + fieldIndex, found := fieldMap[fieldName] + if !found { + return fmt.Errorf("field [%s] is not present in destination object", fieldName) + } + field := iv.Field(fieldIndex) + err = assignPathValues(field.Addr().Interface(), fieldValue) + if err != nil { + return fmt.Errorf("error assigning field [%s]: %w", fieldName, err) + } + } + return nil + case reflect.Ptr: + // If we have a pointer after redirecting, it means we're dealing with + // an optional field, such as *string, which was passed in as &foo. We + // will allocate it if necessary, and call ourselves with a different + // interface. 
+ dstVal := reflect.New(it.Elem()) + dstPtr := dstVal.Interface() + err := assignPathValues(dstPtr, pathValues) + iv.Set(dstVal) + return err + case reflect.Bool: + val, err := strconv.ParseBool(pathValues.value) + if err != nil { + return fmt.Errorf("expected a valid bool, got %s", pathValues.value) + } + iv.SetBool(val) + return nil + case reflect.Float32: + val, err := strconv.ParseFloat(pathValues.value, 32) + if err != nil { + return fmt.Errorf("expected a valid float, got %s", pathValues.value) + } + iv.SetFloat(val) + return nil + case reflect.Float64: + val, err := strconv.ParseFloat(pathValues.value, 64) + if err != nil { + return fmt.Errorf("expected a valid float, got %s", pathValues.value) + } + iv.SetFloat(val) + return nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + val, err := strconv.ParseInt(pathValues.value, 10, 64) + if err != nil { + return fmt.Errorf("expected a valid int, got %s", pathValues.value) + } + iv.SetInt(val) + return nil + case reflect.String: + iv.SetString(pathValues.value) + return nil + default: + return errors.New("unhandled type: " + it.String()) + } +} + +func assignSlice(dst reflect.Value, pathValues fieldOrValue) error { + // Gather up the values + nValues := len(pathValues.fields) + values := make([]string, nValues) + // We expect to have consecutive array indices in the map + for i := 0; i < nValues; i++ { + indexStr := strconv.Itoa(i) + fv, found := pathValues.fields[indexStr] + if !found { + return errors.New("array deepObjects must have consecutive indices") + } + values[i] = fv.value + } + + // This could be cleaner, but we can call into assignPathValues to + // avoid recreating this logic. + for i := 0; i < nValues; i++ { + dstElem := dst.Index(i).Addr() + err := assignPathValues(dstElem.Interface(), fieldOrValue{value: values[i]}) + if err != nil { + return fmt.Errorf("error binding array: %w", err) + } + } + + return nil +} + +func sortedFieldOrValueKeys(m map[string]fieldOrValue) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/jsonmerge.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/jsonmerge.go new file mode 100644 index 00000000..155abfbe --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/jsonmerge.go @@ -0,0 +1,26 @@ +package runtime + +import ( + "encoding/json" + + "github.com/apapsch/go-jsonmerge/v2" +) + +// JsonMerge merges two JSON representation into a single object. `data` is the +// existing representation and `patch` is the new data to be merged in +func JsonMerge(data, patch json.RawMessage) (json.RawMessage, error) { + merger := jsonmerge.Merger{ + CopyNonexistent: true, + } + if data == nil { + data = []byte(`{}`) + } + if patch == nil { + patch = []byte(`{}`) + } + merged, err := merger.MergeBytes(data, patch) + if err != nil { + return nil, err + } + return merged, nil +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go new file mode 100644 index 00000000..8f7e1292 --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go @@ -0,0 +1,473 @@ +// Copyright 2019 DeepMap, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package runtime + +import ( + "bytes" + "encoding" + "encoding/json" + "errors" + "fmt" + "net/url" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/deepmap/oapi-codegen/pkg/types" +) + +// Parameter escaping works differently based on where a header is found + +type ParamLocation int + +const ( + ParamLocationUndefined ParamLocation = iota + ParamLocationQuery + ParamLocationPath + ParamLocationHeader + ParamLocationCookie +) + +// StyleParam is used by older generated code, and must remain compatible +// with that code. It is not to be used in new templates. Please see the +// function below, which can specialize its output based on the location of +// the parameter. +func StyleParam(style string, explode bool, paramName string, value interface{}) (string, error) { + return StyleParamWithLocation(style, explode, paramName, ParamLocationUndefined, value) +} + +// Given an input value, such as a primitive type, array or object, turn it +// into a parameter based on style/explode definition, performing whatever +// escaping is necessary based on parameter location +func StyleParamWithLocation(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + t := reflect.TypeOf(value) + v := reflect.ValueOf(value) + + // Things may be passed in by pointer, we need to dereference, so return + // error on nil. 
+ if t.Kind() == reflect.Ptr { + if v.IsNil() { + return "", fmt.Errorf("value is a nil pointer") + } + v = reflect.Indirect(v) + t = v.Type() + } + + // If the value implements encoding.TextMarshaler we use it for marshaling + // https://github.com/deepmap/oapi-codegen/issues/504 + if tu, ok := value.(encoding.TextMarshaler); ok { + t := reflect.Indirect(reflect.ValueOf(value)).Type() + convertableToTime := t.ConvertibleTo(reflect.TypeOf(time.Time{})) + convertableToDate := t.ConvertibleTo(reflect.TypeOf(types.Date{})) + + // Since both time.Time and types.Date implement encoding.TextMarshaler + // we should avoid calling theirs MarshalText() + if !convertableToTime && !convertableToDate { + b, err := tu.MarshalText() + if err != nil { + return "", fmt.Errorf("error marshaling '%s' as text: %s", value, err) + } + + return stylePrimitive(style, explode, paramName, paramLocation, string(b)) + } + } + + switch t.Kind() { + case reflect.Slice: + n := v.Len() + sliceVal := make([]interface{}, n) + for i := 0; i < n; i++ { + sliceVal[i] = v.Index(i).Interface() + } + return styleSlice(style, explode, paramName, paramLocation, sliceVal) + case reflect.Struct: + return styleStruct(style, explode, paramName, paramLocation, value) + case reflect.Map: + return styleMap(style, explode, paramName, paramLocation, value) + default: + return stylePrimitive(style, explode, paramName, paramLocation, value) + } +} + +func styleSlice(style string, explode bool, paramName string, paramLocation ParamLocation, values []interface{}) (string, error) { + if style == "deepObject" { + if !explode { + return "", errors.New("deepObjects must be exploded") + } + return MarshalDeepObject(values, paramName) + } + + var prefix string + var separator string + + switch style { + case "simple": + separator = "," + case "label": + prefix = "." + if explode { + separator = "." + } else { + separator = "," + } + case "matrix": + prefix = fmt.Sprintf(";%s=", paramName) + if explode { + separator = prefix + } else { + separator = "," + } + case "form": + prefix = fmt.Sprintf("%s=", paramName) + if explode { + separator = "&" + prefix + } else { + separator = "," + } + case "spaceDelimited": + prefix = fmt.Sprintf("%s=", paramName) + if explode { + separator = "&" + prefix + } else { + separator = " " + } + case "pipeDelimited": + prefix = fmt.Sprintf("%s=", paramName) + if explode { + separator = "&" + prefix + } else { + separator = "|" + } + default: + return "", fmt.Errorf("unsupported style '%s'", style) + } + + // We're going to assume here that the array is one of simple types. + var err error + var part string + parts := make([]string, len(values)) + for i, v := range values { + part, err = primitiveToString(v) + part = escapeParameterString(part, paramLocation) + parts[i] = part + if err != nil { + return "", fmt.Errorf("error formatting '%s': %s", paramName, err) + } + } + return prefix + strings.Join(parts, separator), nil +} + +func sortedKeys(strMap map[string]string) []string { + keys := make([]string, len(strMap)) + i := 0 + for k := range strMap { + keys[i] = k + i++ + } + sort.Strings(keys) + return keys +} + +// These are special cases. The value may be a date, time, or uuid, +// in which case, marshal it into the correct format. 
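On the client side, the same styles are rendered by this function; a sketch for a slice-valued query parameter (name invented), where the query location triggers URL escaping of each value.

package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	tags := []string{"a b", "c"}

	exploded, _ := runtime.StyleParamWithLocation("form", true, "tags", runtime.ParamLocationQuery, tags)
	fmt.Println(exploded) // tags=a+b&tags=c

	packed, _ := runtime.StyleParamWithLocation("form", false, "tags", runtime.ParamLocationQuery, tags)
	fmt.Println(packed) // tags=a+b,c
}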
+func marshalKnownTypes(value interface{}) (string, bool) { + v := reflect.Indirect(reflect.ValueOf(value)) + t := v.Type() + + if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { + tt := v.Convert(reflect.TypeOf(time.Time{})) + timeVal := tt.Interface().(time.Time) + return timeVal.Format(time.RFC3339Nano), true + } + + if t.ConvertibleTo(reflect.TypeOf(types.Date{})) { + d := v.Convert(reflect.TypeOf(types.Date{})) + dateVal := d.Interface().(types.Date) + return dateVal.Format(types.DateFormat), true + } + + if t.ConvertibleTo(reflect.TypeOf(types.UUID{})) { + u := v.Convert(reflect.TypeOf(types.UUID{})) + uuidVal := u.Interface().(types.UUID) + return uuidVal.String(), true + } + + return "", false +} + +func styleStruct(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + if timeVal, ok := marshalKnownTypes(value); ok { + styledVal, err := stylePrimitive(style, explode, paramName, paramLocation, timeVal) + if err != nil { + return "", fmt.Errorf("failed to style time: %w", err) + } + return styledVal, nil + } + + if style == "deepObject" { + if !explode { + return "", errors.New("deepObjects must be exploded") + } + return MarshalDeepObject(value, paramName) + } + + // If input has Marshaler, such as object has Additional Property or AnyOf, + // We use this Marshaler and convert into interface{} before styling. + if m, ok := value.(json.Marshaler); ok { + buf, err := m.MarshalJSON() + if err != nil { + return "", fmt.Errorf("failed to marshal input to JSON: %w", err) + } + e := json.NewDecoder(bytes.NewReader(buf)) + e.UseNumber() + var i2 interface{} + err = e.Decode(&i2) + if err != nil { + return "", fmt.Errorf("failed to unmarshal JSON: %w", err) + } + s, err := StyleParamWithLocation(style, explode, paramName, paramLocation, i2) + if err != nil { + return "", fmt.Errorf("error style JSON structure: %w", err) + } + return s, nil + } + + // Otherwise, we need to build a dictionary of the struct's fields. Each + // field may only be a primitive value. + v := reflect.ValueOf(value) + t := reflect.TypeOf(value) + fieldDict := make(map[string]string) + + for i := 0; i < t.NumField(); i++ { + fieldT := t.Field(i) + // Find the json annotation on the field, and use the json specified + // name if available, otherwise, just the field name. + tag := fieldT.Tag.Get("json") + fieldName := fieldT.Name + if tag != "" { + tagParts := strings.Split(tag, ",") + name := tagParts[0] + if name != "" { + fieldName = name + } + } + f := v.Field(i) + + // Unset optional fields will be nil pointers, skip over those. 
+ if f.Type().Kind() == reflect.Ptr && f.IsNil() { + continue + } + str, err := primitiveToString(f.Interface()) + if err != nil { + return "", fmt.Errorf("error formatting '%s': %s", paramName, err) + } + fieldDict[fieldName] = str + } + + return processFieldDict(style, explode, paramName, paramLocation, fieldDict) +} + +func styleMap(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + if style == "deepObject" { + if !explode { + return "", errors.New("deepObjects must be exploded") + } + return MarshalDeepObject(value, paramName) + } + + dict, ok := value.(map[string]interface{}) + if !ok { + return "", errors.New("map not of type map[string]interface{}") + } + + fieldDict := make(map[string]string) + for fieldName, value := range dict { + str, err := primitiveToString(value) + if err != nil { + return "", fmt.Errorf("error formatting '%s': %s", paramName, err) + } + fieldDict[fieldName] = str + } + return processFieldDict(style, explode, paramName, paramLocation, fieldDict) +} + +func processFieldDict(style string, explode bool, paramName string, paramLocation ParamLocation, fieldDict map[string]string) (string, error) { + var parts []string + + // This works for everything except deepObject. We'll handle that one + // separately. + if style != "deepObject" { + if explode { + for _, k := range sortedKeys(fieldDict) { + v := escapeParameterString(fieldDict[k], paramLocation) + parts = append(parts, k+"="+v) + } + } else { + for _, k := range sortedKeys(fieldDict) { + v := escapeParameterString(fieldDict[k], paramLocation) + parts = append(parts, k) + parts = append(parts, v) + } + } + } + + var prefix string + var separator string + + switch style { + case "simple": + separator = "," + case "label": + prefix = "." + if explode { + separator = prefix + } else { + separator = "," + } + case "matrix": + if explode { + separator = ";" + prefix = ";" + } else { + separator = "," + prefix = fmt.Sprintf(";%s=", paramName) + } + case "form": + if explode { + separator = "&" + } else { + prefix = fmt.Sprintf("%s=", paramName) + separator = "," + } + case "deepObject": + { + if !explode { + return "", fmt.Errorf("deepObject parameters must be exploded") + } + for _, k := range sortedKeys(fieldDict) { + v := fieldDict[k] + part := fmt.Sprintf("%s[%s]=%s", paramName, k, v) + parts = append(parts, part) + } + separator = "&" + } + default: + return "", fmt.Errorf("unsupported style '%s'", style) + } + + return prefix + strings.Join(parts, separator), nil +} + +func stylePrimitive(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) { + strVal, err := primitiveToString(value) + if err != nil { + return "", err + } + + var prefix string + switch style { + case "simple": + case "label": + prefix = "." + case "matrix": + prefix = fmt.Sprintf(";%s=", paramName) + case "form": + prefix = fmt.Sprintf("%s=", paramName) + default: + return "", fmt.Errorf("unsupported style '%s'", style) + } + return prefix + escapeParameterString(strVal, paramLocation), nil +} + +// Converts a primitive value to a string. We need to do this based on the +// Kind of an interface, not the Type to work with aliased types. 
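For an object-valued parameter, a sketch of the layouts produced by the struct styling above; the ID type follows the canonical swagger example, and key sorting makes the field order deterministic.

package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

type ID struct {
	Role      string `json:"role"`
	FirstName string `json:"firstName"`
}

func main() {
	id := ID{Role: "admin", FirstName: "Alex"}

	exploded, _ := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, id)
	fmt.Println(exploded) // firstName=Alex&role=admin

	packed, _ := runtime.StyleParamWithLocation("form", false, "id", runtime.ParamLocationQuery, id)
	fmt.Println(packed) // id=firstName,Alex,role,admin

	deep, _ := runtime.StyleParamWithLocation("deepObject", true, "id", runtime.ParamLocationQuery, id)
	fmt.Println(deep) // id[firstName]=Alex&id[role]=admin
}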
+func primitiveToString(value interface{}) (string, error) { + var output string + + // sometimes time and date used like primitive types + // it can happen if paramether is object and has time or date as field + if res, ok := marshalKnownTypes(value); ok { + return res, nil + } + + // Values may come in by pointer for optionals, so make sure to dereferene. + v := reflect.Indirect(reflect.ValueOf(value)) + t := v.Type() + kind := t.Kind() + + switch kind { + case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: + output = strconv.FormatInt(v.Int(), 10) + case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: + output = strconv.FormatUint(v.Uint(), 10) + case reflect.Float64: + output = strconv.FormatFloat(v.Float(), 'f', -1, 64) + case reflect.Float32: + output = strconv.FormatFloat(v.Float(), 'f', -1, 32) + case reflect.Bool: + if v.Bool() { + output = "true" + } else { + output = "false" + } + case reflect.String: + output = v.String() + case reflect.Struct: + // If input has Marshaler, such as object has Additional Property or AnyOf, + // We use this Marshaler and convert into interface{} before styling. + if m, ok := value.(json.Marshaler); ok { + buf, err := m.MarshalJSON() + if err != nil { + return "", fmt.Errorf("failed to marshal input to JSON: %w", err) + } + e := json.NewDecoder(bytes.NewReader(buf)) + e.UseNumber() + var i2 interface{} + err = e.Decode(&i2) + if err != nil { + return "", fmt.Errorf("failed to unmarshal JSON: %w", err) + } + output, err = primitiveToString(i2) + if err != nil { + return "", fmt.Errorf("error convert JSON structure: %w", err) + } + break + } + fallthrough + default: + v, ok := value.(fmt.Stringer) + if !ok { + return "", fmt.Errorf("unsupported type %s", reflect.TypeOf(value).String()) + } + + output = v.String() + } + return output, nil +} + +// This function escapes a parameter value bas on the location of that parameter. +// Query params and path params need different kinds of escaping, while header +// and cookie params seem not to need escaping. 
+func escapeParameterString(value string, paramLocation ParamLocation) string { + switch paramLocation { + case ParamLocationQuery: + return url.QueryEscape(value) + case ParamLocationPath: + return url.PathEscape(value) + default: + return value + } +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go new file mode 100644 index 00000000..6ad852f6 --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go @@ -0,0 +1,43 @@ +package types + +import ( + "encoding/json" + "time" +) + +const DateFormat = "2006-01-02" + +type Date struct { + time.Time +} + +func (d Date) MarshalJSON() ([]byte, error) { + return json.Marshal(d.Time.Format(DateFormat)) +} + +func (d *Date) UnmarshalJSON(data []byte) error { + var dateStr string + err := json.Unmarshal(data, &dateStr) + if err != nil { + return err + } + parsed, err := time.Parse(DateFormat, dateStr) + if err != nil { + return err + } + d.Time = parsed + return nil +} + +func (d Date) String() string { + return d.Time.Format(DateFormat) +} + +func (d *Date) UnmarshalText(data []byte) error { + parsed, err := time.Parse(DateFormat, string(data)) + if err != nil { + return err + } + d.Time = parsed + return nil +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go new file mode 100644 index 00000000..00a4cf6b --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go @@ -0,0 +1,27 @@ +package types + +import ( + "encoding/json" + "errors" +) + +type Email string + +func (e Email) MarshalJSON() ([]byte, error) { + if !emailRegex.MatchString(string(e)) { + return nil, errors.New("email: failed to pass regex validation") + } + return json.Marshal(string(e)) +} + +func (e *Email) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + if !emailRegex.MatchString(s) { + return errors.New("email: failed to pass regex validation") + } + *e = Email(s) + return nil +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/file.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/file.go new file mode 100644 index 00000000..7b26a0d7 --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/file.go @@ -0,0 +1,71 @@ +package types + +import ( + "bytes" + "encoding/json" + "io" + "mime/multipart" +) + +type File struct { + multipart *multipart.FileHeader + data []byte + filename string +} + +func (file *File) InitFromMultipart(header *multipart.FileHeader) { + file.multipart = header + file.data = nil + file.filename = "" +} + +func (file *File) InitFromBytes(data []byte, filename string) { + file.data = data + file.filename = filename + file.multipart = nil +} + +func (file File) MarshalJSON() ([]byte, error) { + b, err := file.Bytes() + if err != nil { + return nil, err + } + return json.Marshal(b) +} + +func (file *File) UnmarshalJSON(data []byte) error { + return json.Unmarshal(data, &file.data) +} + +func (file File) Bytes() ([]byte, error) { + if file.multipart != nil { + f, err := file.multipart.Open() + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + return io.ReadAll(f) + } + return file.data, nil +} + +func (file File) Reader() (io.ReadCloser, error) { + if 
file.multipart != nil { + return file.multipart.Open() + } + return io.NopCloser(bytes.NewReader(file.data)), nil +} + +func (file File) Filename() string { + if file.multipart != nil { + return file.multipart.Filename + } + return file.filename +} + +func (file File) FileSize() int64 { + if file.multipart != nil { + return file.multipart.Size + } + return int64(len(file.data)) +} diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go new file mode 100644 index 00000000..94f17df5 --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go @@ -0,0 +1,11 @@ +package types + +import "regexp" + +const ( + emailRegexString = "^(?:(?:(?:(?:[a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(?:\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|(?:(?:\\x22)(?:(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(?:\\x20|\\x09)+)?(?:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(\\x20|\\x09)+)?(?:\\x22))))@(?:(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$" +) + +var ( + emailRegex = regexp.MustCompile(emailRegexString) +) diff --git a/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/uuid.go b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/uuid.go new file mode 100644 index 00000000..1c11f38b --- /dev/null +++ b/index/server/vendor/github.com/deepmap/oapi-codegen/pkg/types/uuid.go @@ -0,0 +1,7 @@ +package types + +import ( + "github.com/google/uuid" +) + +type UUID = uuid.UUID diff --git a/index/server/vendor/github.com/getkin/kin-openapi/LICENSE b/index/server/vendor/github.com/getkin/kin-openapi/LICENSE new file mode 100644 index 00000000..992b9831 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017-2018 the project authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/callback.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/callback.go new file mode 100644 index 00000000..62cea72d --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/callback.go @@ -0,0 +1,48 @@ +package openapi3 + +import ( + "context" + "fmt" + "sort" + + "github.com/go-openapi/jsonpointer" +) + +type Callbacks map[string]*CallbackRef + +var _ jsonpointer.JSONPointable = (*Callbacks)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (c Callbacks) JSONLookup(token string) (interface{}, error) { + ref, ok := c[token] + if ref == nil || !ok { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +// Callback is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#callback-object +type Callback map[string]*PathItem + +// Validate returns an error if Callback does not comply with the OpenAPI spec. +func (callback Callback) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + keys := make([]string, 0, len(callback)) + for key := range callback { + keys = append(keys, key) + } + sort.Strings(keys) + for _, key := range keys { + v := callback[key] + if err := v.Validate(ctx); err != nil { + return err + } + } + return nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/components.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/components.go new file mode 100644 index 00000000..0981e8bf --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/components.go @@ -0,0 +1,242 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "fmt" + "regexp" + "sort" +) + +// Components is specified by OpenAPI/Swagger standard version 3. 
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#components-object +type Components struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Schemas Schemas `json:"schemas,omitempty" yaml:"schemas,omitempty"` + Parameters ParametersMap `json:"parameters,omitempty" yaml:"parameters,omitempty"` + Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"` + RequestBodies RequestBodies `json:"requestBodies,omitempty" yaml:"requestBodies,omitempty"` + Responses Responses `json:"responses,omitempty" yaml:"responses,omitempty"` + SecuritySchemes SecuritySchemes `json:"securitySchemes,omitempty" yaml:"securitySchemes,omitempty"` + Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"` + Links Links `json:"links,omitempty" yaml:"links,omitempty"` + Callbacks Callbacks `json:"callbacks,omitempty" yaml:"callbacks,omitempty"` +} + +func NewComponents() Components { + return Components{} +} + +// MarshalJSON returns the JSON encoding of Components. +func (components Components) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 9+len(components.Extensions)) + for k, v := range components.Extensions { + m[k] = v + } + if x := components.Schemas; len(x) != 0 { + m["schemas"] = x + } + if x := components.Parameters; len(x) != 0 { + m["parameters"] = x + } + if x := components.Headers; len(x) != 0 { + m["headers"] = x + } + if x := components.RequestBodies; len(x) != 0 { + m["requestBodies"] = x + } + if x := components.Responses; len(x) != 0 { + m["responses"] = x + } + if x := components.SecuritySchemes; len(x) != 0 { + m["securitySchemes"] = x + } + if x := components.Examples; len(x) != 0 { + m["examples"] = x + } + if x := components.Links; len(x) != 0 { + m["links"] = x + } + if x := components.Callbacks; len(x) != 0 { + m["callbacks"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Components to a copy of data. +func (components *Components) UnmarshalJSON(data []byte) error { + type ComponentsBis Components + var x ComponentsBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "schemas") + delete(x.Extensions, "parameters") + delete(x.Extensions, "headers") + delete(x.Extensions, "requestBodies") + delete(x.Extensions, "responses") + delete(x.Extensions, "securitySchemes") + delete(x.Extensions, "examples") + delete(x.Extensions, "links") + delete(x.Extensions, "callbacks") + *components = Components(x) + return nil +} + +// Validate returns an error if Components does not comply with the OpenAPI spec. +func (components *Components) Validate(ctx context.Context, opts ...ValidationOption) (err error) { + ctx = WithValidationOptions(ctx, opts...) 
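A sketch of the Extensions behavior defined by the MarshalJSON/UnmarshalJSON pair above: unknown top-level keys such as vendor extensions are kept in the Extensions map and re-emitted on marshal, while the well-known keys stay in their typed fields.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	src := []byte(`{"schemas":{},"x-internal":true}`)

	var c openapi3.Components
	if err := json.Unmarshal(src, &c); err != nil {
		panic(err)
	}
	fmt.Println(c.Extensions["x-internal"]) // true

	out, _ := json.Marshal(c)
	fmt.Println(string(out)) // {"x-internal":true}
}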
+ + schemas := make([]string, 0, len(components.Schemas)) + for name := range components.Schemas { + schemas = append(schemas, name) + } + sort.Strings(schemas) + for _, k := range schemas { + v := components.Schemas[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("schema %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("schema %q: %w", k, err) + } + } + + parameters := make([]string, 0, len(components.Parameters)) + for name := range components.Parameters { + parameters = append(parameters, name) + } + sort.Strings(parameters) + for _, k := range parameters { + v := components.Parameters[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("parameter %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("parameter %q: %w", k, err) + } + } + + requestBodies := make([]string, 0, len(components.RequestBodies)) + for name := range components.RequestBodies { + requestBodies = append(requestBodies, name) + } + sort.Strings(requestBodies) + for _, k := range requestBodies { + v := components.RequestBodies[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("request body %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("request body %q: %w", k, err) + } + } + + responses := make([]string, 0, len(components.Responses)) + for name := range components.Responses { + responses = append(responses, name) + } + sort.Strings(responses) + for _, k := range responses { + v := components.Responses[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("response %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("response %q: %w", k, err) + } + } + + headers := make([]string, 0, len(components.Headers)) + for name := range components.Headers { + headers = append(headers, name) + } + sort.Strings(headers) + for _, k := range headers { + v := components.Headers[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("header %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("header %q: %w", k, err) + } + } + + securitySchemes := make([]string, 0, len(components.SecuritySchemes)) + for name := range components.SecuritySchemes { + securitySchemes = append(securitySchemes, name) + } + sort.Strings(securitySchemes) + for _, k := range securitySchemes { + v := components.SecuritySchemes[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("security scheme %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("security scheme %q: %w", k, err) + } + } + + examples := make([]string, 0, len(components.Examples)) + for name := range components.Examples { + examples = append(examples, name) + } + sort.Strings(examples) + for _, k := range examples { + v := components.Examples[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("example %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("example %q: %w", k, err) + } + } + + links := make([]string, 0, len(components.Links)) + for name := range components.Links { + links = append(links, name) + } + sort.Strings(links) + for _, k := range links { + v := components.Links[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("link %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("link %q: %w", k, err) + } + } + + callbacks := make([]string, 0, len(components.Callbacks)) + for name := range 
components.Callbacks { + callbacks = append(callbacks, name) + } + sort.Strings(callbacks) + for _, k := range callbacks { + v := components.Callbacks[k] + if err = ValidateIdentifier(k); err != nil { + return fmt.Errorf("callback %q: %w", k, err) + } + if err = v.Validate(ctx); err != nil { + return fmt.Errorf("callback %q: %w", k, err) + } + } + + return validateExtensions(ctx, components.Extensions) +} + +const identifierPattern = `^[a-zA-Z0-9._-]+$` + +// IdentifierRegExp verifies whether Component object key matches 'identifierPattern' pattern, according to OapiAPI v3.x.0. +// Hovever, to be able supporting legacy OpenAPI v2.x, there is a need to customize above pattern in orde not to fail +// converted v2-v3 validation +var IdentifierRegExp = regexp.MustCompile(identifierPattern) + +func ValidateIdentifier(value string) error { + if IdentifierRegExp.MatchString(value) { + return nil + } + return fmt.Errorf("identifier %q is not supported by OpenAPIv3 standard (regexp: %q)", value, identifierPattern) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/content.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/content.go new file mode 100644 index 00000000..81b070ee --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/content.go @@ -0,0 +1,124 @@ +package openapi3 + +import ( + "context" + "sort" + "strings" +) + +// Content is specified by OpenAPI/Swagger 3.0 standard. +type Content map[string]*MediaType + +func NewContent() Content { + return make(map[string]*MediaType) +} + +func NewContentWithSchema(schema *Schema, consumes []string) Content { + if len(consumes) == 0 { + return Content{ + "*/*": NewMediaType().WithSchema(schema), + } + } + content := make(map[string]*MediaType, len(consumes)) + for _, mediaType := range consumes { + content[mediaType] = NewMediaType().WithSchema(schema) + } + return content +} + +func NewContentWithSchemaRef(schema *SchemaRef, consumes []string) Content { + if len(consumes) == 0 { + return Content{ + "*/*": NewMediaType().WithSchemaRef(schema), + } + } + content := make(map[string]*MediaType, len(consumes)) + for _, mediaType := range consumes { + content[mediaType] = NewMediaType().WithSchemaRef(schema) + } + return content +} + +func NewContentWithJSONSchema(schema *Schema) Content { + return Content{ + "application/json": NewMediaType().WithSchema(schema), + } +} +func NewContentWithJSONSchemaRef(schema *SchemaRef) Content { + return Content{ + "application/json": NewMediaType().WithSchemaRef(schema), + } +} + +func NewContentWithFormDataSchema(schema *Schema) Content { + return Content{ + "multipart/form-data": NewMediaType().WithSchema(schema), + } +} + +func NewContentWithFormDataSchemaRef(schema *SchemaRef) Content { + return Content{ + "multipart/form-data": NewMediaType().WithSchemaRef(schema), + } +} + +func (content Content) Get(mime string) *MediaType { + // If the mime is empty then short-circuit to the wildcard. + // We do this here so that we catch only the specific case of + // and empty mime rather than a present, but invalid, mime type. + if mime == "" { + return content["*/*"] + } + // Start by making the most specific match possible + // by using the mime type in full. + if v := content[mime]; v != nil { + return v + } + // If an exact match is not found then we strip all + // metadata from the mime type and only use the x/y + // portion. 
+ i := strings.IndexByte(mime, ';') + if i < 0 { + // If there is no metadata then preserve the full mime type + // string for later wildcard searches. + i = len(mime) + } + mime = mime[:i] + if v := content[mime]; v != nil { + return v + } + // If the x/y pattern has no specific match then we + // try the x/* pattern. + i = strings.IndexByte(mime, '/') + if i < 0 { + // In the case that the given mime type is not valid because it is + // missing the subtype we return nil so that this does not accidentally + // resolve with the wildcard. + return nil + } + mime = mime[:i] + "/*" + if v := content[mime]; v != nil { + return v + } + // Finally, the most generic match of */* is returned + // as a catch-all. + return content["*/*"] +} + +// Validate returns an error if Content does not comply with the OpenAPI spec. +func (content Content) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + keys := make([]string, 0, len(content)) + for key := range content { + keys = append(keys, key) + } + sort.Strings(keys) + for _, k := range keys { + v := content[k] + if err := v.Validate(ctx); err != nil { + return err + } + } + return nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go new file mode 100644 index 00000000..8b6b813f --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go @@ -0,0 +1,49 @@ +package openapi3 + +import ( + "context" + "encoding/json" +) + +// Discriminator is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#discriminator-object +type Discriminator struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + PropertyName string `json:"propertyName" yaml:"propertyName"` // required + Mapping map[string]string `json:"mapping,omitempty" yaml:"mapping,omitempty"` +} + +// MarshalJSON returns the JSON encoding of Discriminator. +func (discriminator Discriminator) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 2+len(discriminator.Extensions)) + for k, v := range discriminator.Extensions { + m[k] = v + } + m["propertyName"] = discriminator.PropertyName + if x := discriminator.Mapping; len(x) != 0 { + m["mapping"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Discriminator to a copy of data. +func (discriminator *Discriminator) UnmarshalJSON(data []byte) error { + type DiscriminatorBis Discriminator + var x DiscriminatorBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "propertyName") + delete(x.Extensions, "mapping") + *discriminator = Discriminator(x) + return nil +} + +// Validate returns an error if Discriminator does not comply with the OpenAPI spec. +func (discriminator *Discriminator) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
+ + return validateExtensions(ctx, discriminator.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/doc.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/doc.go new file mode 100644 index 00000000..41c9965c --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/doc.go @@ -0,0 +1,4 @@ +// Package openapi3 parses and writes OpenAPI 3 specification documents. +// +// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.3.md +package openapi3 diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go new file mode 100644 index 00000000..dc2e5443 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go @@ -0,0 +1,136 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "fmt" + "sort" +) + +// Encoding is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#encoding-object +type Encoding struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + ContentType string `json:"contentType,omitempty" yaml:"contentType,omitempty"` + Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"` + Style string `json:"style,omitempty" yaml:"style,omitempty"` + Explode *bool `json:"explode,omitempty" yaml:"explode,omitempty"` + AllowReserved bool `json:"allowReserved,omitempty" yaml:"allowReserved,omitempty"` +} + +func NewEncoding() *Encoding { + return &Encoding{} +} + +func (encoding *Encoding) WithHeader(name string, header *Header) *Encoding { + return encoding.WithHeaderRef(name, &HeaderRef{ + Value: header, + }) +} + +func (encoding *Encoding) WithHeaderRef(name string, ref *HeaderRef) *Encoding { + headers := encoding.Headers + if headers == nil { + headers = make(map[string]*HeaderRef) + encoding.Headers = headers + } + headers[name] = ref + return encoding +} + +// MarshalJSON returns the JSON encoding of Encoding. +func (encoding Encoding) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 5+len(encoding.Extensions)) + for k, v := range encoding.Extensions { + m[k] = v + } + if x := encoding.ContentType; x != "" { + m["contentType"] = x + } + if x := encoding.Headers; len(x) != 0 { + m["headers"] = x + } + if x := encoding.Style; x != "" { + m["style"] = x + } + if x := encoding.Explode; x != nil { + m["explode"] = x + } + if x := encoding.AllowReserved; x { + m["allowReserved"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Encoding to a copy of data. +func (encoding *Encoding) UnmarshalJSON(data []byte) error { + type EncodingBis Encoding + var x EncodingBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "contentType") + delete(x.Extensions, "headers") + delete(x.Extensions, "style") + delete(x.Extensions, "explode") + delete(x.Extensions, "allowReserved") + *encoding = Encoding(x) + return nil +} + +// SerializationMethod returns a serialization method of request body. +// When serialization method is not defined the method returns the default serialization method. 
+func (encoding *Encoding) SerializationMethod() *SerializationMethod { + sm := &SerializationMethod{Style: SerializationForm, Explode: true} + if encoding != nil { + if encoding.Style != "" { + sm.Style = encoding.Style + } + if encoding.Explode != nil { + sm.Explode = *encoding.Explode + } + } + return sm +} + +// Validate returns an error if Encoding does not comply with the OpenAPI spec. +func (encoding *Encoding) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if encoding == nil { + return nil + } + + headers := make([]string, 0, len(encoding.Headers)) + for k := range encoding.Headers { + headers = append(headers, k) + } + sort.Strings(headers) + for _, k := range headers { + v := encoding.Headers[k] + if err := ValidateIdentifier(k); err != nil { + return nil + } + if err := v.Validate(ctx); err != nil { + return nil + } + } + + // Validate a media types's serialization method. + sm := encoding.SerializationMethod() + switch { + case sm.Style == SerializationForm && sm.Explode, + sm.Style == SerializationForm && !sm.Explode, + sm.Style == SerializationSpaceDelimited && sm.Explode, + sm.Style == SerializationSpaceDelimited && !sm.Explode, + sm.Style == SerializationPipeDelimited && sm.Explode, + sm.Style == SerializationPipeDelimited && !sm.Explode, + sm.Style == SerializationDeepObject && sm.Explode: + default: + return fmt.Errorf("serialization method with style=%q and explode=%v is not supported by media type", sm.Style, sm.Explode) + } + + return validateExtensions(ctx, encoding.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/errors.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/errors.go new file mode 100644 index 00000000..74baab9a --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/errors.go @@ -0,0 +1,59 @@ +package openapi3 + +import ( + "bytes" + "errors" +) + +// MultiError is a collection of errors, intended for when +// multiple issues need to be reported upstream +type MultiError []error + +func (me MultiError) Error() string { + return spliceErr(" | ", me) +} + +func spliceErr(sep string, errs []error) string { + buff := &bytes.Buffer{} + for i, e := range errs { + buff.WriteString(e.Error()) + if i != len(errs)-1 { + buff.WriteString(sep) + } + } + return buff.String() +} + +// Is allows you to determine if a generic error is in fact a MultiError using `errors.Is()` +// It will also return true if any of the contained errors match target +func (me MultiError) Is(target error) bool { + if _, ok := target.(MultiError); ok { + return true + } + for _, e := range me { + if errors.Is(e, target) { + return true + } + } + return false +} + +// As allows you to use `errors.As()` to set target to the first error within the multi error that matches the target type +func (me MultiError) As(target interface{}) bool { + for _, e := range me { + if errors.As(e, target) { + return true + } + } + return false +} + +type multiErrorForOneOf MultiError + +func (meo multiErrorForOneOf) Error() string { + return spliceErr(" Or ", meo) +} + +func (meo multiErrorForOneOf) Unwrap() error { + return MultiError(meo) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/example.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/example.go new file mode 100644 index 00000000..04338bee --- /dev/null +++ 
b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/example.go @@ -0,0 +1,93 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + + "github.com/go-openapi/jsonpointer" +) + +type Examples map[string]*ExampleRef + +var _ jsonpointer.JSONPointable = (*Examples)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (e Examples) JSONLookup(token string) (interface{}, error) { + ref, ok := e[token] + if ref == nil || !ok { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +// Example is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#example-object +type Example struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Value interface{} `json:"value,omitempty" yaml:"value,omitempty"` + ExternalValue string `json:"externalValue,omitempty" yaml:"externalValue,omitempty"` +} + +func NewExample(value interface{}) *Example { + return &Example{Value: value} +} + +// MarshalJSON returns the JSON encoding of Example. +func (example Example) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 4+len(example.Extensions)) + for k, v := range example.Extensions { + m[k] = v + } + if x := example.Summary; x != "" { + m["summary"] = x + } + if x := example.Description; x != "" { + m["description"] = x + } + if x := example.Value; x != nil { + m["value"] = x + } + if x := example.ExternalValue; x != "" { + m["externalValue"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Example to a copy of data. +func (example *Example) UnmarshalJSON(data []byte) error { + type ExampleBis Example + var x ExampleBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "summary") + delete(x.Extensions, "description") + delete(x.Extensions, "value") + delete(x.Extensions, "externalValue") + *example = Example(x) + return nil +} + +// Validate returns an error if Example does not comply with the OpenAPI spec. +func (example *Example) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
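+ // Exactly one of value and externalValue must be set on an example.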
+ + if example.Value != nil && example.ExternalValue != "" { + return errors.New("value and externalValue are mutually exclusive") + } + if example.Value == nil && example.ExternalValue == "" { + return errors.New("no value or externalValue field") + } + + return validateExtensions(ctx, example.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go new file mode 100644 index 00000000..fb7a1da1 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go @@ -0,0 +1,16 @@ +package openapi3 + +import "context" + +func validateExampleValue(ctx context.Context, input interface{}, schema *Schema) error { + opts := make([]SchemaValidationOption, 0, 2) + + if vo := getValidationOptions(ctx); vo.examplesValidationAsReq { + opts = append(opts, VisitAsRequest()) + } else if vo.examplesValidationAsRes { + opts = append(opts, VisitAsResponse()) + } + opts = append(opts, MultiErrors()) + + return schema.VisitJSON(input, opts...) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/extension.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/extension.go new file mode 100644 index 00000000..c2995909 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/extension.go @@ -0,0 +1,24 @@ +package openapi3 + +import ( + "context" + "fmt" + "sort" + "strings" +) + +func validateExtensions(ctx context.Context, extensions map[string]interface{}) error { // FIXME: newtype + Validate(...) + var unknowns []string + for k := range extensions { + if !strings.HasPrefix(k, "x-") { + unknowns = append(unknowns, k) + } + } + + if len(unknowns) != 0 { + sort.Strings(unknowns) + return fmt.Errorf("extra sibling fields: %+v", unknowns) + } + + return nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go new file mode 100644 index 00000000..276a36cc --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go @@ -0,0 +1,61 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/url" +) + +// ExternalDocs is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#external-documentation-object +type ExternalDocs struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Description string `json:"description,omitempty" yaml:"description,omitempty"` + URL string `json:"url,omitempty" yaml:"url,omitempty"` +} + +// MarshalJSON returns the JSON encoding of ExternalDocs. +func (e ExternalDocs) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 2+len(e.Extensions)) + for k, v := range e.Extensions { + m[k] = v + } + if x := e.Description; x != "" { + m["description"] = x + } + if x := e.URL; x != "" { + m["url"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets ExternalDocs to a copy of data. 
+func (e *ExternalDocs) UnmarshalJSON(data []byte) error { + type ExternalDocsBis ExternalDocs + var x ExternalDocsBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "description") + delete(x.Extensions, "url") + *e = ExternalDocs(x) + return nil +} + +// Validate returns an error if ExternalDocs does not comply with the OpenAPI spec. +func (e *ExternalDocs) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if e.URL == "" { + return errors.New("url is required") + } + if _, err := url.Parse(e.URL); err != nil { + return fmt.Errorf("url is incorrect: %w", err) + } + + return validateExtensions(ctx, e.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/header.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/header.go new file mode 100644 index 00000000..8bce69f2 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/header.go @@ -0,0 +1,103 @@ +package openapi3 + +import ( + "context" + "errors" + "fmt" + + "github.com/go-openapi/jsonpointer" +) + +type Headers map[string]*HeaderRef + +var _ jsonpointer.JSONPointable = (*Headers)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (h Headers) JSONLookup(token string) (interface{}, error) { + ref, ok := h[token] + if ref == nil || !ok { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +// Header is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#header-object +type Header struct { + Parameter +} + +var _ jsonpointer.JSONPointable = (*Header)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (header Header) JSONLookup(token string) (interface{}, error) { + return header.Parameter.JSONLookup(token) +} + +// MarshalJSON returns the JSON encoding of Header. +func (header Header) MarshalJSON() ([]byte, error) { + return header.Parameter.MarshalJSON() +} + +// UnmarshalJSON sets Header to a copy of data. +func (header *Header) UnmarshalJSON(data []byte) error { + return header.Parameter.UnmarshalJSON(data) +} + +// SerializationMethod returns a header's serialization method. +func (header *Header) SerializationMethod() (*SerializationMethod, error) { + style := header.Style + if style == "" { + style = SerializationSimple + } + explode := false + if header.Explode != nil { + explode = *header.Explode + } + return &SerializationMethod{Style: style, Explode: explode}, nil +} + +// Validate returns an error if Header does not comply with the OpenAPI spec. +func (header *Header) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if header.Name != "" { + return errors.New("header 'name' MUST NOT be specified, it is given in the corresponding headers map") + } + if header.In != "" { + return errors.New("header 'in' MUST NOT be specified, it is implicitly in header") + } + + // Validate a parameter's serialization method. 
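+ // Only the "simple" style, exploded or not, is supported for header parameters.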
+ sm, err := header.SerializationMethod() + if err != nil { + return err + } + if smSupported := false || + sm.Style == SerializationSimple && !sm.Explode || + sm.Style == SerializationSimple && sm.Explode; !smSupported { + e := fmt.Errorf("serialization method with style=%q and explode=%v is not supported by a header parameter", sm.Style, sm.Explode) + return fmt.Errorf("header schema is invalid: %w", e) + } + + if (header.Schema == nil) == (header.Content == nil) { + e := fmt.Errorf("parameter must contain exactly one of content and schema: %v", header) + return fmt.Errorf("header schema is invalid: %w", e) + } + if schema := header.Schema; schema != nil { + if err := schema.Validate(ctx); err != nil { + return fmt.Errorf("header schema is invalid: %w", err) + } + } + + if content := header.Content; content != nil { + if err := content.Validate(ctx); err != nil { + return fmt.Errorf("header content is invalid: %w", err) + } + } + return nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/info.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/info.go new file mode 100644 index 00000000..381047fc --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/info.go @@ -0,0 +1,185 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" +) + +// Info is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#info-object +type Info struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Title string `json:"title" yaml:"title"` // Required + Description string `json:"description,omitempty" yaml:"description,omitempty"` + TermsOfService string `json:"termsOfService,omitempty" yaml:"termsOfService,omitempty"` + Contact *Contact `json:"contact,omitempty" yaml:"contact,omitempty"` + License *License `json:"license,omitempty" yaml:"license,omitempty"` + Version string `json:"version" yaml:"version"` // Required +} + +// MarshalJSON returns the JSON encoding of Info. +func (info Info) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 6+len(info.Extensions)) + for k, v := range info.Extensions { + m[k] = v + } + m["title"] = info.Title + if x := info.Description; x != "" { + m["description"] = x + } + if x := info.TermsOfService; x != "" { + m["termsOfService"] = x + } + if x := info.Contact; x != nil { + m["contact"] = x + } + if x := info.License; x != nil { + m["license"] = x + } + m["version"] = info.Version + return json.Marshal(m) +} + +// UnmarshalJSON sets Info to a copy of data. +func (info *Info) UnmarshalJSON(data []byte) error { + type InfoBis Info + var x InfoBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "title") + delete(x.Extensions, "description") + delete(x.Extensions, "termsOfService") + delete(x.Extensions, "contact") + delete(x.Extensions, "license") + delete(x.Extensions, "version") + *info = Info(x) + return nil +} + +// Validate returns an error if Info does not comply with the OpenAPI spec. +func (info *Info) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
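+ // Contact and license are validated only when present; title and version must be non-empty.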
+ + if contact := info.Contact; contact != nil { + if err := contact.Validate(ctx); err != nil { + return err + } + } + + if license := info.License; license != nil { + if err := license.Validate(ctx); err != nil { + return err + } + } + + if info.Version == "" { + return errors.New("value of version must be a non-empty string") + } + + if info.Title == "" { + return errors.New("value of title must be a non-empty string") + } + + return validateExtensions(ctx, info.Extensions) +} + +// Contact is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#contact-object +type Contact struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Name string `json:"name,omitempty" yaml:"name,omitempty"` + URL string `json:"url,omitempty" yaml:"url,omitempty"` + Email string `json:"email,omitempty" yaml:"email,omitempty"` +} + +// MarshalJSON returns the JSON encoding of Contact. +func (contact Contact) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 3+len(contact.Extensions)) + for k, v := range contact.Extensions { + m[k] = v + } + if x := contact.Name; x != "" { + m["name"] = x + } + if x := contact.URL; x != "" { + m["url"] = x + } + if x := contact.Email; x != "" { + m["email"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Contact to a copy of data. +func (contact *Contact) UnmarshalJSON(data []byte) error { + type ContactBis Contact + var x ContactBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "name") + delete(x.Extensions, "url") + delete(x.Extensions, "email") + *contact = Contact(x) + return nil +} + +// Validate returns an error if Contact does not comply with the OpenAPI spec. +func (contact *Contact) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + return validateExtensions(ctx, contact.Extensions) +} + +// License is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#license-object +type License struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Name string `json:"name" yaml:"name"` // Required + URL string `json:"url,omitempty" yaml:"url,omitempty"` +} + +// MarshalJSON returns the JSON encoding of License. +func (license License) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 2+len(license.Extensions)) + for k, v := range license.Extensions { + m[k] = v + } + m["name"] = license.Name + if x := license.URL; x != "" { + m["url"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets License to a copy of data. +func (license *License) UnmarshalJSON(data []byte) error { + type LicenseBis License + var x LicenseBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "name") + delete(x.Extensions, "url") + *license = License(x) + return nil +} + +// Validate returns an error if License does not comply with the OpenAPI spec. +func (license *License) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
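+ // A license object requires a non-empty name; url is optional.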
+ + if license.Name == "" { + return errors.New("value of license name must be a non-empty string") + } + + return validateExtensions(ctx, license.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go new file mode 100644 index 00000000..b8506535 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go @@ -0,0 +1,434 @@ +package openapi3 + +import ( + "context" + "path/filepath" + "strings" +) + +type RefNameResolver func(string) string + +// DefaultRefResolver is a default implementation of refNameResolver for the +// InternalizeRefs function. +// +// If a reference points to an element inside a document, it returns the last +// element in the reference using filepath.Base. Otherwise if the reference points +// to a file, it returns the file name trimmed of all extensions. +func DefaultRefNameResolver(ref string) string { + if ref == "" { + return "" + } + split := strings.SplitN(ref, "#", 2) + if len(split) == 2 { + return filepath.Base(split[1]) + } + ref = split[0] + for ext := filepath.Ext(ref); len(ext) > 0; ext = filepath.Ext(ref) { + ref = strings.TrimSuffix(ref, ext) + } + return filepath.Base(ref) +} + +func schemaNames(s Schemas) []string { + out := make([]string, 0, len(s)) + for i := range s { + out = append(out, i) + } + return out +} + +func parametersMapNames(s ParametersMap) []string { + out := make([]string, 0, len(s)) + for i := range s { + out = append(out, i) + } + return out +} + +func isExternalRef(ref string, parentIsExternal bool) bool { + return ref != "" && (!strings.HasPrefix(ref, "#/components/") || parentIsExternal) +} + +func (doc *T) addSchemaToSpec(s *SchemaRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { + if s == nil || !isExternalRef(s.Ref, parentIsExternal) { + return false + } + + name := refNameResolver(s.Ref) + if doc.Components != nil { + if _, ok := doc.Components.Schemas[name]; ok { + s.Ref = "#/components/schemas/" + name + return true + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.Schemas == nil { + doc.Components.Schemas = make(Schemas) + } + doc.Components.Schemas[name] = s.Value.NewRef() + s.Ref = "#/components/schemas/" + name + return true +} + +func (doc *T) addParameterToSpec(p *ParameterRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { + if p == nil || !isExternalRef(p.Ref, parentIsExternal) { + return false + } + name := refNameResolver(p.Ref) + if doc.Components != nil { + if _, ok := doc.Components.Parameters[name]; ok { + p.Ref = "#/components/parameters/" + name + return true + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.Parameters == nil { + doc.Components.Parameters = make(ParametersMap) + } + doc.Components.Parameters[name] = &ParameterRef{Value: p.Value} + p.Ref = "#/components/parameters/" + name + return true +} + +func (doc *T) addHeaderToSpec(h *HeaderRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { + if h == nil || !isExternalRef(h.Ref, parentIsExternal) { + return false + } + name := refNameResolver(h.Ref) + if doc.Components != nil { + if _, ok := doc.Components.Headers[name]; ok { + h.Ref = "#/components/headers/" + name + return true + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.Headers == nil { + doc.Components.Headers = 
make(Headers) + } + doc.Components.Headers[name] = &HeaderRef{Value: h.Value} + h.Ref = "#/components/headers/" + name + return true +} + +func (doc *T) addRequestBodyToSpec(r *RequestBodyRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { + if r == nil || !isExternalRef(r.Ref, parentIsExternal) { + return false + } + name := refNameResolver(r.Ref) + if doc.Components != nil { + if _, ok := doc.Components.RequestBodies[name]; ok { + r.Ref = "#/components/requestBodies/" + name + return true + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.RequestBodies == nil { + doc.Components.RequestBodies = make(RequestBodies) + } + doc.Components.RequestBodies[name] = &RequestBodyRef{Value: r.Value} + r.Ref = "#/components/requestBodies/" + name + return true +} + +func (doc *T) addResponseToSpec(r *ResponseRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { + if r == nil || !isExternalRef(r.Ref, parentIsExternal) { + return false + } + name := refNameResolver(r.Ref) + if doc.Components != nil { + if _, ok := doc.Components.Responses[name]; ok { + r.Ref = "#/components/responses/" + name + return true + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.Responses == nil { + doc.Components.Responses = make(Responses) + } + doc.Components.Responses[name] = &ResponseRef{Value: r.Value} + r.Ref = "#/components/responses/" + name + return true +} + +func (doc *T) addSecuritySchemeToSpec(ss *SecuritySchemeRef, refNameResolver RefNameResolver, parentIsExternal bool) { + if ss == nil || !isExternalRef(ss.Ref, parentIsExternal) { + return + } + name := refNameResolver(ss.Ref) + if doc.Components != nil { + if _, ok := doc.Components.SecuritySchemes[name]; ok { + ss.Ref = "#/components/securitySchemes/" + name + return + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.SecuritySchemes == nil { + doc.Components.SecuritySchemes = make(SecuritySchemes) + } + doc.Components.SecuritySchemes[name] = &SecuritySchemeRef{Value: ss.Value} + ss.Ref = "#/components/securitySchemes/" + name + +} + +func (doc *T) addExampleToSpec(e *ExampleRef, refNameResolver RefNameResolver, parentIsExternal bool) { + if e == nil || !isExternalRef(e.Ref, parentIsExternal) { + return + } + name := refNameResolver(e.Ref) + if doc.Components != nil { + if _, ok := doc.Components.Examples[name]; ok { + e.Ref = "#/components/examples/" + name + return + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.Examples == nil { + doc.Components.Examples = make(Examples) + } + doc.Components.Examples[name] = &ExampleRef{Value: e.Value} + e.Ref = "#/components/examples/" + name + +} + +func (doc *T) addLinkToSpec(l *LinkRef, refNameResolver RefNameResolver, parentIsExternal bool) { + if l == nil || !isExternalRef(l.Ref, parentIsExternal) { + return + } + name := refNameResolver(l.Ref) + if doc.Components != nil { + if _, ok := doc.Components.Links[name]; ok { + l.Ref = "#/components/links/" + name + return + } + } + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.Links == nil { + doc.Components.Links = make(Links) + } + doc.Components.Links[name] = &LinkRef{Value: l.Value} + l.Ref = "#/components/links/" + name + +} + +func (doc *T) addCallbackToSpec(c *CallbackRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { + if c == nil || !isExternalRef(c.Ref, parentIsExternal) { + return false + } + name 
:= refNameResolver(c.Ref) + + if doc.Components == nil { + doc.Components = &Components{} + } + if doc.Components.Callbacks == nil { + doc.Components.Callbacks = make(Callbacks) + } + c.Ref = "#/components/callbacks/" + name + doc.Components.Callbacks[name] = &CallbackRef{Value: c.Value} + return true +} + +func (doc *T) derefSchema(s *Schema, refNameResolver RefNameResolver, parentIsExternal bool) { + if s == nil || doc.isVisitedSchema(s) { + return + } + + for _, list := range []SchemaRefs{s.AllOf, s.AnyOf, s.OneOf} { + for _, s2 := range list { + isExternal := doc.addSchemaToSpec(s2, refNameResolver, parentIsExternal) + if s2 != nil { + doc.derefSchema(s2.Value, refNameResolver, isExternal || parentIsExternal) + } + } + } + for _, s2 := range s.Properties { + isExternal := doc.addSchemaToSpec(s2, refNameResolver, parentIsExternal) + if s2 != nil { + doc.derefSchema(s2.Value, refNameResolver, isExternal || parentIsExternal) + } + } + for _, ref := range []*SchemaRef{s.Not, s.AdditionalProperties.Schema, s.Items} { + isExternal := doc.addSchemaToSpec(ref, refNameResolver, parentIsExternal) + if ref != nil { + doc.derefSchema(ref.Value, refNameResolver, isExternal || parentIsExternal) + } + } +} + +func (doc *T) derefHeaders(hs Headers, refNameResolver RefNameResolver, parentIsExternal bool) { + for _, h := range hs { + isExternal := doc.addHeaderToSpec(h, refNameResolver, parentIsExternal) + if doc.isVisitedHeader(h.Value) { + continue + } + doc.derefParameter(h.Value.Parameter, refNameResolver, parentIsExternal || isExternal) + } +} + +func (doc *T) derefExamples(es Examples, refNameResolver RefNameResolver, parentIsExternal bool) { + for _, e := range es { + doc.addExampleToSpec(e, refNameResolver, parentIsExternal) + } +} + +func (doc *T) derefContent(c Content, refNameResolver RefNameResolver, parentIsExternal bool) { + for _, mediatype := range c { + isExternal := doc.addSchemaToSpec(mediatype.Schema, refNameResolver, parentIsExternal) + if mediatype.Schema != nil { + doc.derefSchema(mediatype.Schema.Value, refNameResolver, isExternal || parentIsExternal) + } + doc.derefExamples(mediatype.Examples, refNameResolver, parentIsExternal) + for _, e := range mediatype.Encoding { + doc.derefHeaders(e.Headers, refNameResolver, parentIsExternal) + } + } +} + +func (doc *T) derefLinks(ls Links, refNameResolver RefNameResolver, parentIsExternal bool) { + for _, l := range ls { + doc.addLinkToSpec(l, refNameResolver, parentIsExternal) + } +} + +func (doc *T) derefResponses(es Responses, refNameResolver RefNameResolver, parentIsExternal bool) { + for _, e := range es { + isExternal := doc.addResponseToSpec(e, refNameResolver, parentIsExternal) + if e.Value != nil { + doc.derefHeaders(e.Value.Headers, refNameResolver, isExternal || parentIsExternal) + doc.derefContent(e.Value.Content, refNameResolver, isExternal || parentIsExternal) + doc.derefLinks(e.Value.Links, refNameResolver, isExternal || parentIsExternal) + } + } +} + +func (doc *T) derefParameter(p Parameter, refNameResolver RefNameResolver, parentIsExternal bool) { + isExternal := doc.addSchemaToSpec(p.Schema, refNameResolver, parentIsExternal) + doc.derefContent(p.Content, refNameResolver, parentIsExternal) + if p.Schema != nil { + doc.derefSchema(p.Schema.Value, refNameResolver, isExternal || parentIsExternal) + } +} + +func (doc *T) derefRequestBody(r RequestBody, refNameResolver RefNameResolver, parentIsExternal bool) { + doc.derefContent(r.Content, refNameResolver, parentIsExternal) +} + +func (doc *T) derefPaths(paths 
map[string]*PathItem, refNameResolver RefNameResolver, parentIsExternal bool) { + for _, ops := range paths { + if isExternalRef(ops.Ref, parentIsExternal) { + parentIsExternal = true + } + // inline full operations + ops.Ref = "" + + for _, param := range ops.Parameters { + doc.addParameterToSpec(param, refNameResolver, parentIsExternal) + } + + for _, op := range ops.Operations() { + isExternal := doc.addRequestBodyToSpec(op.RequestBody, refNameResolver, parentIsExternal) + if op.RequestBody != nil && op.RequestBody.Value != nil { + doc.derefRequestBody(*op.RequestBody.Value, refNameResolver, parentIsExternal || isExternal) + } + for _, cb := range op.Callbacks { + isExternal := doc.addCallbackToSpec(cb, refNameResolver, parentIsExternal) + if cb.Value != nil { + doc.derefPaths(*cb.Value, refNameResolver, parentIsExternal || isExternal) + } + } + doc.derefResponses(op.Responses, refNameResolver, parentIsExternal) + for _, param := range op.Parameters { + isExternal := doc.addParameterToSpec(param, refNameResolver, parentIsExternal) + if param.Value != nil { + doc.derefParameter(*param.Value, refNameResolver, parentIsExternal || isExternal) + } + } + } + } +} + +// InternalizeRefs removes all references to external files from the spec and moves them +// to the components section. +// +// refNameResolver takes in references to returns a name to store the reference under locally. +// It MUST return a unique name for each reference type. +// A default implementation is provided that will suffice for most use cases. See the function +// documention for more details. +// +// Example: +// +// doc.InternalizeRefs(context.Background(), nil) +func (doc *T) InternalizeRefs(ctx context.Context, refNameResolver func(ref string) string) { + doc.resetVisited() + + if refNameResolver == nil { + refNameResolver = DefaultRefNameResolver + } + + if components := doc.Components; components != nil { + names := schemaNames(components.Schemas) + for _, name := range names { + schema := components.Schemas[name] + isExternal := doc.addSchemaToSpec(schema, refNameResolver, false) + if schema != nil { + schema.Ref = "" // always dereference the top level + doc.derefSchema(schema.Value, refNameResolver, isExternal) + } + } + names = parametersMapNames(components.Parameters) + for _, name := range names { + p := components.Parameters[name] + isExternal := doc.addParameterToSpec(p, refNameResolver, false) + if p != nil && p.Value != nil { + p.Ref = "" // always dereference the top level + doc.derefParameter(*p.Value, refNameResolver, isExternal) + } + } + doc.derefHeaders(components.Headers, refNameResolver, false) + for _, req := range components.RequestBodies { + isExternal := doc.addRequestBodyToSpec(req, refNameResolver, false) + if req != nil && req.Value != nil { + req.Ref = "" // always dereference the top level + doc.derefRequestBody(*req.Value, refNameResolver, isExternal) + } + } + doc.derefResponses(components.Responses, refNameResolver, false) + for _, ss := range components.SecuritySchemes { + doc.addSecuritySchemeToSpec(ss, refNameResolver, false) + } + doc.derefExamples(components.Examples, refNameResolver, false) + doc.derefLinks(components.Links, refNameResolver, false) + for _, cb := range components.Callbacks { + isExternal := doc.addCallbackToSpec(cb, refNameResolver, false) + if cb != nil && cb.Value != nil { + cb.Ref = "" // always dereference the top level + doc.derefPaths(*cb.Value, refNameResolver, isExternal) + } + } + } + + doc.derefPaths(doc.Paths, refNameResolver, false) +} diff --git 
a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/link.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/link.go new file mode 100644 index 00000000..08dfa8d6 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/link.go @@ -0,0 +1,101 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + + "github.com/go-openapi/jsonpointer" +) + +type Links map[string]*LinkRef + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (links Links) JSONLookup(token string) (interface{}, error) { + ref, ok := links[token] + if ok == false { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref != nil && ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +var _ jsonpointer.JSONPointable = (*Links)(nil) + +// Link is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#link-object +type Link struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + OperationRef string `json:"operationRef,omitempty" yaml:"operationRef,omitempty"` + OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Parameters map[string]interface{} `json:"parameters,omitempty" yaml:"parameters,omitempty"` + Server *Server `json:"server,omitempty" yaml:"server,omitempty"` + RequestBody interface{} `json:"requestBody,omitempty" yaml:"requestBody,omitempty"` +} + +// MarshalJSON returns the JSON encoding of Link. +func (link Link) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 6+len(link.Extensions)) + for k, v := range link.Extensions { + m[k] = v + } + + if x := link.OperationRef; x != "" { + m["operationRef"] = x + } + if x := link.OperationID; x != "" { + m["operationId"] = x + } + if x := link.Description; x != "" { + m["description"] = x + } + if x := link.Parameters; len(x) != 0 { + m["parameters"] = x + } + if x := link.Server; x != nil { + m["server"] = x + } + if x := link.RequestBody; x != nil { + m["requestBody"] = x + } + + return json.Marshal(m) +} + +// UnmarshalJSON sets Link to a copy of data. +func (link *Link) UnmarshalJSON(data []byte) error { + type LinkBis Link + var x LinkBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "operationRef") + delete(x.Extensions, "operationId") + delete(x.Extensions, "description") + delete(x.Extensions, "parameters") + delete(x.Extensions, "server") + delete(x.Extensions, "requestBody") + *link = Link(x) + return nil +} + +// Validate returns an error if Link does not comply with the OpenAPI spec. +func (link *Link) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
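+ // Exactly one of operationId and operationRef must be set on a link.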
+ + if link.OperationID == "" && link.OperationRef == "" { + return errors.New("missing operationId or operationRef on link") + } + if link.OperationID != "" && link.OperationRef != "" { + return fmt.Errorf("operationId %q and operationRef %q are mutually exclusive", link.OperationID, link.OperationRef) + } + + return validateExtensions(ctx, link.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader.go new file mode 100644 index 00000000..4a14f67f --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader.go @@ -0,0 +1,1039 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/url" + "path" + "path/filepath" + "reflect" + "sort" + "strconv" + "strings" + + "github.com/invopop/yaml" +) + +var CircularReferenceError = "kin-openapi bug found: circular schema reference not handled" +var CircularReferenceCounter = 3 + +func foundUnresolvedRef(ref string) error { + return fmt.Errorf("found unresolved ref: %q", ref) +} + +func failedToResolveRefFragmentPart(value, what string) error { + return fmt.Errorf("failed to resolve %q in fragment in URI: %q", what, value) +} + +// Loader helps deserialize an OpenAPIv3 document +type Loader struct { + // IsExternalRefsAllowed enables visiting other files + IsExternalRefsAllowed bool + + // ReadFromURIFunc allows overriding the any file/URL reading func + ReadFromURIFunc ReadFromURIFunc + + Context context.Context + + rootDir string + rootLocation string + + visitedPathItemRefs map[string]struct{} + + visitedDocuments map[string]*T + + visitedCallback map[*Callback]struct{} + visitedExample map[*Example]struct{} + visitedHeader map[*Header]struct{} + visitedLink map[*Link]struct{} + visitedParameter map[*Parameter]struct{} + visitedRequestBody map[*RequestBody]struct{} + visitedResponse map[*Response]struct{} + visitedSchema map[*Schema]struct{} + visitedSecurityScheme map[*SecurityScheme]struct{} +} + +// NewLoader returns an empty Loader +func NewLoader() *Loader { + return &Loader{ + Context: context.Background(), + } +} + +func (loader *Loader) resetVisitedPathItemRefs() { + loader.visitedPathItemRefs = make(map[string]struct{}) +} + +// LoadFromURI loads a spec from a remote URL +func (loader *Loader) LoadFromURI(location *url.URL) (*T, error) { + loader.resetVisitedPathItemRefs() + return loader.loadFromURIInternal(location) +} + +// LoadFromFile loads a spec from a local file path +func (loader *Loader) LoadFromFile(location string) (*T, error) { + loader.rootDir = path.Dir(location) + return loader.LoadFromURI(&url.URL{Path: filepath.ToSlash(location)}) +} + +func (loader *Loader) loadFromURIInternal(location *url.URL) (*T, error) { + data, err := loader.readURL(location) + if err != nil { + return nil, err + } + return loader.loadFromDataWithPathInternal(data, location) +} + +func (loader *Loader) allowsExternalRefs(ref string) (err error) { + if !loader.IsExternalRefsAllowed { + err = fmt.Errorf("encountered disallowed external reference: %q", ref) + } + return +} + +// loadSingleElementFromURI reads the data from ref and unmarshals to the passed element. 
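+// The ref must not contain a fragment; the resolved URL is returned so callers can use it as the new base path.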
+func (loader *Loader) loadSingleElementFromURI(ref string, rootPath *url.URL, element interface{}) (*url.URL, error) { + if err := loader.allowsExternalRefs(ref); err != nil { + return nil, err + } + + parsedURL, err := url.Parse(ref) + if err != nil { + return nil, err + } + if fragment := parsedURL.Fragment; fragment != "" { + return nil, fmt.Errorf("unexpected ref fragment %q", fragment) + } + + resolvedPath, err := resolvePath(rootPath, parsedURL) + if err != nil { + return nil, fmt.Errorf("could not resolve path: %w", err) + } + + data, err := loader.readURL(resolvedPath) + if err != nil { + return nil, err + } + if err := unmarshal(data, element); err != nil { + return nil, err + } + + return resolvedPath, nil +} + +func (loader *Loader) readURL(location *url.URL) ([]byte, error) { + if f := loader.ReadFromURIFunc; f != nil { + return f(loader, location) + } + return DefaultReadFromURI(loader, location) +} + +// LoadFromData loads a spec from a byte array +func (loader *Loader) LoadFromData(data []byte) (*T, error) { + loader.resetVisitedPathItemRefs() + doc := &T{} + if err := unmarshal(data, doc); err != nil { + return nil, err + } + if err := loader.ResolveRefsIn(doc, nil); err != nil { + return nil, err + } + return doc, nil +} + +// LoadFromDataWithPath takes the OpenAPI document data in bytes and a path where the resolver can find referred +// elements and returns a *T with all resolved data or an error if unable to load data or resolve refs. +func (loader *Loader) LoadFromDataWithPath(data []byte, location *url.URL) (*T, error) { + loader.resetVisitedPathItemRefs() + return loader.loadFromDataWithPathInternal(data, location) +} + +func (loader *Loader) loadFromDataWithPathInternal(data []byte, location *url.URL) (*T, error) { + if loader.visitedDocuments == nil { + loader.visitedDocuments = make(map[string]*T) + loader.rootLocation = location.Path + } + uri := location.String() + if doc, ok := loader.visitedDocuments[uri]; ok { + return doc, nil + } + + doc := &T{} + loader.visitedDocuments[uri] = doc + + if err := unmarshal(data, doc); err != nil { + return nil, err + } + if err := loader.ResolveRefsIn(doc, location); err != nil { + return nil, err + } + + return doc, nil +} + +func unmarshal(data []byte, v interface{}) error { + // See https://github.com/getkin/kin-openapi/issues/680 + if err := json.Unmarshal(data, v); err != nil { + // UnmarshalStrict(data, v) TODO: investigate how ymlv3 handles duplicate map keys + return yaml.Unmarshal(data, v) + } + return nil +} + +// ResolveRefsIn expands references if for instance spec was just unmarshalled +func (loader *Loader) ResolveRefsIn(doc *T, location *url.URL) (err error) { + if loader.Context == nil { + loader.Context = context.Background() + } + + if loader.visitedPathItemRefs == nil { + loader.resetVisitedPathItemRefs() + } + + if components := doc.Components; components != nil { + for _, component := range components.Headers { + if err = loader.resolveHeaderRef(doc, component, location); err != nil { + return + } + } + for _, component := range components.Parameters { + if err = loader.resolveParameterRef(doc, component, location); err != nil { + return + } + } + for _, component := range components.RequestBodies { + if err = loader.resolveRequestBodyRef(doc, component, location); err != nil { + return + } + } + for _, component := range components.Responses { + if err = loader.resolveResponseRef(doc, component, location); err != nil { + return + } + } + for _, component := range components.Schemas { + if err = 
loader.resolveSchemaRef(doc, component, location, []string{}); err != nil { + return + } + } + for _, component := range components.SecuritySchemes { + if err = loader.resolveSecuritySchemeRef(doc, component, location); err != nil { + return + } + } + + examples := make([]string, 0, len(components.Examples)) + for name := range components.Examples { + examples = append(examples, name) + } + sort.Strings(examples) + for _, name := range examples { + component := components.Examples[name] + if err = loader.resolveExampleRef(doc, component, location); err != nil { + return + } + } + + for _, component := range components.Callbacks { + if err = loader.resolveCallbackRef(doc, component, location); err != nil { + return + } + } + } + + // Visit all operations + for _, pathItem := range doc.Paths { + if pathItem == nil { + continue + } + if err = loader.resolvePathItemRef(doc, pathItem, location); err != nil { + return + } + } + + return +} + +func join(basePath *url.URL, relativePath *url.URL) (*url.URL, error) { + if basePath == nil { + return relativePath, nil + } + newPath, err := url.Parse(basePath.String()) + if err != nil { + return nil, fmt.Errorf("cannot copy path: %q", basePath.String()) + } + newPath.Path = path.Join(path.Dir(newPath.Path), relativePath.Path) + return newPath, nil +} + +func resolvePath(basePath *url.URL, componentPath *url.URL) (*url.URL, error) { + if componentPath.Scheme == "" && componentPath.Host == "" { + // support absolute paths + if componentPath.Path[0] == '/' { + return componentPath, nil + } + return join(basePath, componentPath) + } + return componentPath, nil +} + +func isSingleRefElement(ref string) bool { + return !strings.Contains(ref, "#") +} + +func (loader *Loader) resolveComponent(doc *T, ref string, path *url.URL, resolved interface{}) ( + componentDoc *T, + componentPath *url.URL, + err error, +) { + if componentDoc, ref, componentPath, err = loader.resolveRef(doc, ref, path); err != nil { + return nil, nil, err + } + + parsedURL, err := url.Parse(ref) + if err != nil { + return nil, nil, fmt.Errorf("cannot parse reference: %q: %v", ref, parsedURL) + } + fragment := parsedURL.Fragment + if !strings.HasPrefix(fragment, "/") { + return nil, nil, fmt.Errorf("expected fragment prefix '#/' in URI %q", ref) + } + + drill := func(cursor interface{}) (interface{}, error) { + for _, pathPart := range strings.Split(fragment[1:], "/") { + pathPart = unescapeRefString(pathPart) + + if cursor, err = drillIntoField(cursor, pathPart); err != nil { + e := failedToResolveRefFragmentPart(ref, pathPart) + return nil, fmt.Errorf("%s: %w", e, err) + } + if cursor == nil { + return nil, failedToResolveRefFragmentPart(ref, pathPart) + } + } + return cursor, nil + } + var cursor interface{} + if cursor, err = drill(componentDoc); err != nil { + if path == nil { + return nil, nil, err + } + var err2 error + data, err2 := loader.readURL(path) + if err2 != nil { + return nil, nil, err + } + if err2 = unmarshal(data, &cursor); err2 != nil { + return nil, nil, err + } + if cursor, err2 = drill(cursor); err2 != nil || cursor == nil { + return nil, nil, err + } + err = nil + } + + switch { + case reflect.TypeOf(cursor) == reflect.TypeOf(resolved): + reflect.ValueOf(resolved).Elem().Set(reflect.ValueOf(cursor).Elem()) + return componentDoc, componentPath, nil + + case reflect.TypeOf(cursor) == reflect.TypeOf(map[string]interface{}{}): + codec := func(got, expect interface{}) error { + enc, err := json.Marshal(got) + if err != nil { + return err + } + if err = 
json.Unmarshal(enc, expect); err != nil { + return err + } + return nil + } + if err := codec(cursor, resolved); err != nil { + return nil, nil, fmt.Errorf("bad data in %q (expecting %s)", ref, readableType(resolved)) + } + return componentDoc, componentPath, nil + + default: + return nil, nil, fmt.Errorf("bad data in %q (expecting %s)", ref, readableType(resolved)) + } +} + +func readableType(x interface{}) string { + switch x.(type) { + case *Callback: + return "callback object" + case *CallbackRef: + return "ref to callback object" + case *ExampleRef: + return "ref to example object" + case *HeaderRef: + return "ref to header object" + case *LinkRef: + return "ref to link object" + case *ParameterRef: + return "ref to parameter object" + case *PathItem: + return "pathItem object" + case *RequestBodyRef: + return "ref to requestBody object" + case *ResponseRef: + return "ref to response object" + case *SchemaRef: + return "ref to schema object" + case *SecuritySchemeRef: + return "ref to securityScheme object" + default: + panic(fmt.Sprintf("unreachable %T", x)) + } +} + +func drillIntoField(cursor interface{}, fieldName string) (interface{}, error) { + // Special case due to multijson + if s, ok := cursor.(*SchemaRef); ok && fieldName == "additionalProperties" { + if ap := s.Value.AdditionalProperties.Has; ap != nil { + return *ap, nil + } + return s.Value.AdditionalProperties.Schema, nil + } + + switch val := reflect.Indirect(reflect.ValueOf(cursor)); val.Kind() { + case reflect.Map: + elementValue := val.MapIndex(reflect.ValueOf(fieldName)) + if !elementValue.IsValid() { + return nil, fmt.Errorf("map key %q not found", fieldName) + } + return elementValue.Interface(), nil + + case reflect.Slice: + i, err := strconv.ParseUint(fieldName, 10, 32) + if err != nil { + return nil, err + } + index := int(i) + if 0 > index || index >= val.Len() { + return nil, errors.New("slice index out of bounds") + } + return val.Index(index).Interface(), nil + + case reflect.Struct: + hasFields := false + for i := 0; i < val.NumField(); i++ { + hasFields = true + if fieldName == strings.Split(val.Type().Field(i).Tag.Get("yaml"), ",")[0] { + return val.Field(i).Interface(), nil + } + } + // if cursor is a "ref wrapper" struct (e.g. 
RequestBodyRef), + if _, ok := val.Type().FieldByName("Value"); ok { + // try digging into its Value field + return drillIntoField(val.FieldByName("Value").Interface(), fieldName) + } + if hasFields { + if ff := val.Type().Field(0); ff.PkgPath == "" && ff.Name == "Extensions" { + extensions := val.Field(0).Interface().(map[string]interface{}) + if enc, ok := extensions[fieldName]; ok { + return enc, nil + } + } + } + return nil, fmt.Errorf("struct field %q not found", fieldName) + + default: + return nil, errors.New("not a map, slice nor struct") + } +} + +func (loader *Loader) resolveRef(doc *T, ref string, path *url.URL) (*T, string, *url.URL, error) { + if ref != "" && ref[0] == '#' { + return doc, ref, path, nil + } + + if err := loader.allowsExternalRefs(ref); err != nil { + return nil, "", nil, err + } + + parsedURL, err := url.Parse(ref) + if err != nil { + return nil, "", nil, fmt.Errorf("cannot parse reference: %q: %v", ref, parsedURL) + } + fragment := parsedURL.Fragment + parsedURL.Fragment = "" + + var resolvedPath *url.URL + if resolvedPath, err = resolvePath(path, parsedURL); err != nil { + return nil, "", nil, fmt.Errorf("error resolving path: %w", err) + } + + if doc, err = loader.loadFromURIInternal(resolvedPath); err != nil { + return nil, "", nil, fmt.Errorf("error resolving reference %q: %w", ref, err) + } + + return doc, "#" + fragment, resolvedPath, nil +} + +func (loader *Loader) resolveHeaderRef(doc *T, component *HeaderRef, documentPath *url.URL) (err error) { + if component != nil && component.Value != nil { + if loader.visitedHeader == nil { + loader.visitedHeader = make(map[*Header]struct{}) + } + if _, ok := loader.visitedHeader[component.Value]; ok { + return nil + } + loader.visitedHeader[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid header: value MUST be an object") + } + if ref := component.Ref; ref != "" { + if isSingleRefElement(ref) { + var header Header + if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &header); err != nil { + return err + } + component.Value = &header + } else { + var resolved HeaderRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err := loader.resolveHeaderRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + value := component.Value + if value == nil { + return nil + } + + if schema := value.Schema; schema != nil { + if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { + return err + } + } + return nil +} + +func (loader *Loader) resolveParameterRef(doc *T, component *ParameterRef, documentPath *url.URL) (err error) { + if component != nil && component.Value != nil { + if loader.visitedParameter == nil { + loader.visitedParameter = make(map[*Parameter]struct{}) + } + if _, ok := loader.visitedParameter[component.Value]; ok { + return nil + } + loader.visitedParameter[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid parameter: value MUST be an object") + } + ref := component.Ref + if ref != "" { + if isSingleRefElement(ref) { + var param Parameter + if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, ¶m); err != nil { + return err + } + component.Value = ¶m + } else { + var resolved ParameterRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err := 
loader.resolveParameterRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + value := component.Value + if value == nil { + return nil + } + + if value.Content != nil && value.Schema != nil { + return errors.New("cannot contain both schema and content in a parameter") + } + for _, contentType := range value.Content { + if schema := contentType.Schema; schema != nil { + if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { + return err + } + } + } + if schema := value.Schema; schema != nil { + if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { + return err + } + } + return nil +} + +func (loader *Loader) resolveRequestBodyRef(doc *T, component *RequestBodyRef, documentPath *url.URL) (err error) { + if component != nil && component.Value != nil { + if loader.visitedRequestBody == nil { + loader.visitedRequestBody = make(map[*RequestBody]struct{}) + } + if _, ok := loader.visitedRequestBody[component.Value]; ok { + return nil + } + loader.visitedRequestBody[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid requestBody: value MUST be an object") + } + if ref := component.Ref; ref != "" { + if isSingleRefElement(ref) { + var requestBody RequestBody + if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &requestBody); err != nil { + return err + } + component.Value = &requestBody + } else { + var resolved RequestBodyRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err = loader.resolveRequestBodyRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + value := component.Value + if value == nil { + return nil + } + + for _, contentType := range value.Content { + examples := make([]string, 0, len(contentType.Examples)) + for name := range contentType.Examples { + examples = append(examples, name) + } + sort.Strings(examples) + for _, name := range examples { + example := contentType.Examples[name] + if err := loader.resolveExampleRef(doc, example, documentPath); err != nil { + return err + } + contentType.Examples[name] = example + } + if schema := contentType.Schema; schema != nil { + if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { + return err + } + } + } + return nil +} + +func (loader *Loader) resolveResponseRef(doc *T, component *ResponseRef, documentPath *url.URL) (err error) { + if component != nil && component.Value != nil { + if loader.visitedResponse == nil { + loader.visitedResponse = make(map[*Response]struct{}) + } + if _, ok := loader.visitedResponse[component.Value]; ok { + return nil + } + loader.visitedResponse[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid response: value MUST be an object") + } + ref := component.Ref + if ref != "" { + if isSingleRefElement(ref) { + var resp Response + if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &resp); err != nil { + return err + } + component.Value = &resp + } else { + var resolved ResponseRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err := loader.resolveResponseRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + value := component.Value + if value == nil { + return nil + } + 
+ for _, header := range value.Headers { + if err := loader.resolveHeaderRef(doc, header, documentPath); err != nil { + return err + } + } + for _, contentType := range value.Content { + if contentType == nil { + continue + } + examples := make([]string, 0, len(contentType.Examples)) + for name := range contentType.Examples { + examples = append(examples, name) + } + sort.Strings(examples) + for _, name := range examples { + example := contentType.Examples[name] + if err := loader.resolveExampleRef(doc, example, documentPath); err != nil { + return err + } + contentType.Examples[name] = example + } + if schema := contentType.Schema; schema != nil { + if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { + return err + } + contentType.Schema = schema + } + } + for _, link := range value.Links { + if err := loader.resolveLinkRef(doc, link, documentPath); err != nil { + return err + } + } + return nil +} + +func (loader *Loader) resolveSchemaRef(doc *T, component *SchemaRef, documentPath *url.URL, visited []string) (err error) { + if component == nil { + return errors.New("invalid schema: value MUST be an object") + } + + if component.Value != nil { + if loader.visitedSchema == nil { + loader.visitedSchema = make(map[*Schema]struct{}) + } + if _, ok := loader.visitedSchema[component.Value]; ok { + return nil + } + loader.visitedSchema[component.Value] = struct{}{} + } + + ref := component.Ref + if ref != "" { + if isSingleRefElement(ref) { + var schema Schema + if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &schema); err != nil { + return err + } + component.Value = &schema + } else { + if visitedLimit(visited, ref) { + visited = append(visited, ref) + return fmt.Errorf("%s - %s", CircularReferenceError, strings.Join(visited, " -> ")) + } + visited = append(visited, ref) + + var resolved SchemaRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err := loader.resolveSchemaRef(doc, &resolved, componentPath, visited); err != nil { + return err + } + component.Value = resolved.Value + } + if loader.visitedSchema == nil { + loader.visitedSchema = make(map[*Schema]struct{}) + } + loader.visitedSchema[component.Value] = struct{}{} + } + value := component.Value + if value == nil { + return nil + } + + // ResolveRefs referred schemas + if v := value.Items; v != nil { + if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { + return err + } + } + for _, v := range value.Properties { + if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { + return err + } + } + if v := value.AdditionalProperties.Schema; v != nil { + if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { + return err + } + } + if v := value.Not; v != nil { + if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { + return err + } + } + for _, v := range value.AllOf { + if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { + return err + } + } + for _, v := range value.AnyOf { + if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { + return err + } + } + for _, v := range value.OneOf { + if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { + return err + } + } + return nil +} + +func (loader *Loader) resolveSecuritySchemeRef(doc *T, component *SecuritySchemeRef, documentPath *url.URL) (err error) { + if component != nil && 
component.Value != nil { + if loader.visitedSecurityScheme == nil { + loader.visitedSecurityScheme = make(map[*SecurityScheme]struct{}) + } + if _, ok := loader.visitedSecurityScheme[component.Value]; ok { + return nil + } + loader.visitedSecurityScheme[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid securityScheme: value MUST be an object") + } + if ref := component.Ref; ref != "" { + if isSingleRefElement(ref) { + var scheme SecurityScheme + if _, err = loader.loadSingleElementFromURI(ref, documentPath, &scheme); err != nil { + return err + } + component.Value = &scheme + } else { + var resolved SecuritySchemeRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err := loader.resolveSecuritySchemeRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + return nil +} + +func (loader *Loader) resolveExampleRef(doc *T, component *ExampleRef, documentPath *url.URL) (err error) { + if component != nil && component.Value != nil { + if loader.visitedExample == nil { + loader.visitedExample = make(map[*Example]struct{}) + } + if _, ok := loader.visitedExample[component.Value]; ok { + return nil + } + loader.visitedExample[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid example: value MUST be an object") + } + if ref := component.Ref; ref != "" { + if isSingleRefElement(ref) { + var example Example + if _, err = loader.loadSingleElementFromURI(ref, documentPath, &example); err != nil { + return err + } + component.Value = &example + } else { + var resolved ExampleRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err := loader.resolveExampleRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + return nil +} + +func (loader *Loader) resolveCallbackRef(doc *T, component *CallbackRef, documentPath *url.URL) (err error) { + if component != nil && component.Value != nil { + if loader.visitedCallback == nil { + loader.visitedCallback = make(map[*Callback]struct{}) + } + if _, ok := loader.visitedCallback[component.Value]; ok { + return nil + } + loader.visitedCallback[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid callback: value MUST be an object") + } + if ref := component.Ref; ref != "" { + if isSingleRefElement(ref) { + var resolved Callback + if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &resolved); err != nil { + return err + } + component.Value = &resolved + } else { + var resolved CallbackRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err = loader.resolveCallbackRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + value := component.Value + if value == nil { + return nil + } + + for _, pathItem := range *value { + if err = loader.resolvePathItemRef(doc, pathItem, documentPath); err != nil { + return err + } + } + return nil +} + +func (loader *Loader) resolveLinkRef(doc *T, component *LinkRef, documentPath *url.URL) (err error) { + if component != nil && component.Value != nil { + if loader.visitedLink == nil { + loader.visitedLink = make(map[*Link]struct{}) + } + if _, ok := loader.visitedLink[component.Value]; ok { + return nil + } + 
loader.visitedLink[component.Value] = struct{}{} + } + + if component == nil { + return errors.New("invalid link: value MUST be an object") + } + if ref := component.Ref; ref != "" { + if isSingleRefElement(ref) { + var link Link + if _, err = loader.loadSingleElementFromURI(ref, documentPath, &link); err != nil { + return err + } + component.Value = &link + } else { + var resolved LinkRef + doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) + if err != nil { + return err + } + if err := loader.resolveLinkRef(doc, &resolved, componentPath); err != nil { + return err + } + component.Value = resolved.Value + } + } + return nil +} + +func (loader *Loader) resolvePathItemRef(doc *T, pathItem *PathItem, documentPath *url.URL) (err error) { + if pathItem == nil { + return errors.New("invalid path item: value MUST be an object") + } + ref := pathItem.Ref + if ref != "" { + if pathItem.Summary != "" || + pathItem.Description != "" || + pathItem.Connect != nil || + pathItem.Delete != nil || + pathItem.Get != nil || + pathItem.Head != nil || + pathItem.Options != nil || + pathItem.Patch != nil || + pathItem.Post != nil || + pathItem.Put != nil || + pathItem.Trace != nil || + len(pathItem.Servers) != 0 || + len(pathItem.Parameters) != 0 { + return nil + } + if isSingleRefElement(ref) { + var p PathItem + if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &p); err != nil { + return err + } + *pathItem = p + } else { + var resolved PathItem + if doc, documentPath, err = loader.resolveComponent(doc, ref, documentPath, &resolved); err != nil { + return err + } + *pathItem = resolved + } + pathItem.Ref = ref + } + + for _, parameter := range pathItem.Parameters { + if err = loader.resolveParameterRef(doc, parameter, documentPath); err != nil { + return + } + } + for _, operation := range pathItem.Operations() { + for _, parameter := range operation.Parameters { + if err = loader.resolveParameterRef(doc, parameter, documentPath); err != nil { + return + } + } + if requestBody := operation.RequestBody; requestBody != nil { + if err = loader.resolveRequestBodyRef(doc, requestBody, documentPath); err != nil { + return + } + } + for _, response := range operation.Responses { + if err = loader.resolveResponseRef(doc, response, documentPath); err != nil { + return + } + } + for _, callback := range operation.Callbacks { + if err = loader.resolveCallbackRef(doc, callback, documentPath); err != nil { + return + } + } + } + return +} + +func unescapeRefString(ref string) string { + return strings.Replace(strings.Replace(ref, "~1", "/", -1), "~0", "~", -1) +} + +func visitedLimit(visited []string, ref string) bool { + visitedCount := 0 + for _, v := range visited { + if v == ref { + visitedCount++ + if visitedCount >= CircularReferenceCounter { + return true + } + } + } + return false +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go new file mode 100644 index 00000000..92ac043f --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go @@ -0,0 +1,112 @@ +package openapi3 + +import ( + "errors" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "path/filepath" + "sync" +) + +// ReadFromURIFunc defines a function which reads the contents of a resource +// located at a URI. 
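// Consumer-side sketch of the Loader API defined above (LoadFromData resolves refs via
// ResolveRefsIn, then T.Validate checks the document). The import path and the file name
// "openapi.yaml" are assumptions for illustration; the calls themselves come from this package.
package main

import (
	"fmt"
	"os"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	data, err := os.ReadFile("openapi.yaml") // e.g. an index server spec
	if err != nil {
		panic(err)
	}

	loader := openapi3.NewLoader()
	loader.IsExternalRefsAllowed = true // permit $ref to other files or URLs

	doc, err := loader.LoadFromData(data) // unmarshals, then resolves references
	if err != nil {
		panic(err)
	}
	if err := doc.Validate(loader.Context); err != nil {
		panic(err)
	}
	fmt.Println("loaded:", doc.Info.Title)
}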
+type ReadFromURIFunc func(loader *Loader, url *url.URL) ([]byte, error) + +var uriMu = &sync.RWMutex{} + +// ErrURINotSupported indicates the ReadFromURIFunc does not know how to handle a +// given URI. +var ErrURINotSupported = errors.New("unsupported URI") + +// ReadFromURIs returns a ReadFromURIFunc which tries to read a URI using the +// given reader functions, in the same order. If a reader function does not +// support the URI and returns ErrURINotSupported, the next function is checked +// until a match is found, or the URI is not supported by any. +func ReadFromURIs(readers ...ReadFromURIFunc) ReadFromURIFunc { + return func(loader *Loader, url *url.URL) ([]byte, error) { + for i := range readers { + buf, err := readers[i](loader, url) + if err == ErrURINotSupported { + continue + } else if err != nil { + return nil, err + } + return buf, nil + } + return nil, ErrURINotSupported + } +} + +// DefaultReadFromURI returns a caching ReadFromURIFunc which can read remote +// HTTP URIs and local file URIs. +var DefaultReadFromURI = URIMapCache(ReadFromURIs(ReadFromHTTP(http.DefaultClient), ReadFromFile)) + +// ReadFromHTTP returns a ReadFromURIFunc which uses the given http.Client to +// read the contents from a remote HTTP URI. This client may be customized to +// implement timeouts, RFC 7234 caching, etc. +func ReadFromHTTP(cl *http.Client) ReadFromURIFunc { + return func(loader *Loader, location *url.URL) ([]byte, error) { + if location.Scheme == "" || location.Host == "" { + return nil, ErrURINotSupported + } + req, err := http.NewRequest("GET", location.String(), nil) + if err != nil { + return nil, err + } + resp, err := cl.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.StatusCode > 399 { + return nil, fmt.Errorf("error loading %q: request returned status code %d", location.String(), resp.StatusCode) + } + return ioutil.ReadAll(resp.Body) + } +} + +// ReadFromFile is a ReadFromURIFunc which reads local file URIs. +func ReadFromFile(loader *Loader, location *url.URL) ([]byte, error) { + if location.Host != "" { + return nil, ErrURINotSupported + } + if location.Scheme != "" && location.Scheme != "file" { + return nil, ErrURINotSupported + } + return ioutil.ReadFile(location.Path) +} + +// URIMapCache returns a ReadFromURIFunc that caches the contents read from URI +// locations in a simple map. This cache implementation is suitable for +// short-lived processes such as command-line tools which process OpenAPI +// documents. +func URIMapCache(reader ReadFromURIFunc) ReadFromURIFunc { + cache := map[string][]byte{} + return func(loader *Loader, location *url.URL) (buf []byte, err error) { + if location.Scheme == "" || location.Scheme == "file" { + if !filepath.IsAbs(location.Path) { + // Do not cache relative file paths; this can cause trouble if + // the current working directory changes when processing + // multiple top-level documents. 
+ return reader(loader, location) + } + } + uri := location.String() + var ok bool + uriMu.RLock() + if buf, ok = cache[uri]; ok { + uriMu.RUnlock() + return + } + uriMu.RUnlock() + if buf, err = reader(loader, location); err != nil { + return + } + uriMu.Lock() + defer uriMu.Unlock() + cache[uri] = buf + return + } +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go new file mode 100644 index 00000000..2a9b4721 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go @@ -0,0 +1,167 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sort" + + "github.com/go-openapi/jsonpointer" +) + +// MediaType is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#media-type-object +type MediaType struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Schema *SchemaRef `json:"schema,omitempty" yaml:"schema,omitempty"` + Example interface{} `json:"example,omitempty" yaml:"example,omitempty"` + Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"` + Encoding map[string]*Encoding `json:"encoding,omitempty" yaml:"encoding,omitempty"` +} + +var _ jsonpointer.JSONPointable = (*MediaType)(nil) + +func NewMediaType() *MediaType { + return &MediaType{} +} + +func (mediaType *MediaType) WithSchema(schema *Schema) *MediaType { + if schema == nil { + mediaType.Schema = nil + } else { + mediaType.Schema = &SchemaRef{Value: schema} + } + return mediaType +} + +func (mediaType *MediaType) WithSchemaRef(schema *SchemaRef) *MediaType { + mediaType.Schema = schema + return mediaType +} + +func (mediaType *MediaType) WithExample(name string, value interface{}) *MediaType { + example := mediaType.Examples + if example == nil { + example = make(map[string]*ExampleRef) + mediaType.Examples = example + } + example[name] = &ExampleRef{ + Value: NewExample(value), + } + return mediaType +} + +func (mediaType *MediaType) WithEncoding(name string, enc *Encoding) *MediaType { + encoding := mediaType.Encoding + if encoding == nil { + encoding = make(map[string]*Encoding) + mediaType.Encoding = encoding + } + encoding[name] = enc + return mediaType +} + +// MarshalJSON returns the JSON encoding of MediaType. +func (mediaType MediaType) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 4+len(mediaType.Extensions)) + for k, v := range mediaType.Extensions { + m[k] = v + } + if x := mediaType.Schema; x != nil { + m["schema"] = x + } + if x := mediaType.Example; x != nil { + m["example"] = x + } + if x := mediaType.Examples; len(x) != 0 { + m["examples"] = x + } + if x := mediaType.Encoding; len(x) != 0 { + m["encoding"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets MediaType to a copy of data. +func (mediaType *MediaType) UnmarshalJSON(data []byte) error { + type MediaTypeBis MediaType + var x MediaTypeBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "schema") + delete(x.Extensions, "example") + delete(x.Extensions, "examples") + delete(x.Extensions, "encoding") + *mediaType = MediaType(x) + return nil +} + +// Validate returns an error if MediaType does not comply with the OpenAPI spec. 
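// Consumer-side sketch of the reader helpers above: composing ReadFromURIs, ReadFromHTTP,
// ReadFromFile and URIMapCache into a custom ReadFromURIFunc, as DefaultReadFromURI does.
// The import path and "openapi.yaml" are illustrative assumptions.
package main

import (
	"fmt"
	"net/http"
	"net/url"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	loader := openapi3.NewLoader()
	loader.IsExternalRefsAllowed = true

	// Same shape as DefaultReadFromURI, but with a slot for a customized HTTP client.
	loader.ReadFromURIFunc = openapi3.URIMapCache(openapi3.ReadFromURIs(
		openapi3.ReadFromHTTP(http.DefaultClient),
		openapi3.ReadFromFile,
	))

	doc, err := loader.LoadFromURI(&url.URL{Path: "openapi.yaml"})
	if err != nil {
		panic(err)
	}
	fmt.Println(doc.OpenAPI)
}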
+func (mediaType *MediaType) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if mediaType == nil { + return nil + } + if schema := mediaType.Schema; schema != nil { + if err := schema.Validate(ctx); err != nil { + return err + } + + if mediaType.Example != nil && mediaType.Examples != nil { + return errors.New("example and examples are mutually exclusive") + } + + if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled { + if example := mediaType.Example; example != nil { + if err := validateExampleValue(ctx, example, schema.Value); err != nil { + return fmt.Errorf("invalid example: %w", err) + } + } + + if examples := mediaType.Examples; examples != nil { + names := make([]string, 0, len(examples)) + for name := range examples { + names = append(names, name) + } + sort.Strings(names) + for _, k := range names { + v := examples[k] + if err := v.Validate(ctx); err != nil { + return fmt.Errorf("example %s: %w", k, err) + } + if err := validateExampleValue(ctx, v.Value.Value, schema.Value); err != nil { + return fmt.Errorf("example %s: %w", k, err) + } + } + } + } + } + + return validateExtensions(ctx, mediaType.Extensions) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (mediaType MediaType) JSONLookup(token string) (interface{}, error) { + switch token { + case "schema": + if mediaType.Schema != nil { + if mediaType.Schema.Ref != "" { + return &Ref{Ref: mediaType.Schema.Ref}, nil + } + return mediaType.Schema.Value, nil + } + case "example": + return mediaType.Example, nil + case "examples": + return mediaType.Examples, nil + case "encoding": + return mediaType.Encoding, nil + } + v, _, err := jsonpointer.GetForToken(mediaType.Extensions, token) + return v, err +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go new file mode 100644 index 00000000..8b8f71bb --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go @@ -0,0 +1,156 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" +) + +// T is the root of an OpenAPI v3 document +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#openapi-object +type T struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + OpenAPI string `json:"openapi" yaml:"openapi"` // Required + Components *Components `json:"components,omitempty" yaml:"components,omitempty"` + Info *Info `json:"info" yaml:"info"` // Required + Paths Paths `json:"paths" yaml:"paths"` // Required + Security SecurityRequirements `json:"security,omitempty" yaml:"security,omitempty"` + Servers Servers `json:"servers,omitempty" yaml:"servers,omitempty"` + Tags Tags `json:"tags,omitempty" yaml:"tags,omitempty"` + ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` + + visited visitedComponent +} + +// MarshalJSON returns the JSON encoding of T. 
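// Consumer-side sketch of the MediaType builders above. NewStringSchema is assumed from this
// package's schema helpers; the example name and value are illustrative. Note that `example`
// and `examples` are mutually exclusive, so only named examples are set here.
package example

import "github.com/getkin/kin-openapi/openapi3"

func jsonMediaType() *openapi3.MediaType {
	return openapi3.NewMediaType().
		WithSchema(openapi3.NewStringSchema()).
		WithExample("greeting", "hello")
}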
+func (doc T) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 4+len(doc.Extensions)) + for k, v := range doc.Extensions { + m[k] = v + } + m["openapi"] = doc.OpenAPI + if x := doc.Components; x != nil { + m["components"] = x + } + m["info"] = doc.Info + m["paths"] = doc.Paths + if x := doc.Security; len(x) != 0 { + m["security"] = x + } + if x := doc.Servers; len(x) != 0 { + m["servers"] = x + } + if x := doc.Tags; len(x) != 0 { + m["tags"] = x + } + if x := doc.ExternalDocs; x != nil { + m["externalDocs"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets T to a copy of data. +func (doc *T) UnmarshalJSON(data []byte) error { + type TBis T + var x TBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "openapi") + delete(x.Extensions, "components") + delete(x.Extensions, "info") + delete(x.Extensions, "paths") + delete(x.Extensions, "security") + delete(x.Extensions, "servers") + delete(x.Extensions, "tags") + delete(x.Extensions, "externalDocs") + *doc = T(x) + return nil +} + +func (doc *T) AddOperation(path string, method string, operation *Operation) { + if doc.Paths == nil { + doc.Paths = make(Paths) + } + pathItem := doc.Paths[path] + if pathItem == nil { + pathItem = &PathItem{} + doc.Paths[path] = pathItem + } + pathItem.SetOperation(method, operation) +} + +func (doc *T) AddServer(server *Server) { + doc.Servers = append(doc.Servers, server) +} + +// Validate returns an error if T does not comply with the OpenAPI spec. +// Validations Options can be provided to modify the validation behavior. +func (doc *T) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if doc.OpenAPI == "" { + return errors.New("value of openapi must be a non-empty string") + } + + var wrap func(error) error + // NOTE: only mention info/components/paths/... key in this func's errors. 
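// Consumer-side sketch of building a document with the helpers above (AddServer, AddOperation)
// and validating it. The title, server URL, and path are made-up values for illustration.
package main

import (
	"context"
	"fmt"
	"net/http"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	doc := &openapi3.T{
		OpenAPI: "3.0.3",
		Info:    &openapi3.Info{Title: "Example API", Version: "1.0.0"},
	}
	doc.AddServer(&openapi3.Server{URL: "http://localhost:8080"})

	op := openapi3.NewOperation()
	op.Responses = openapi3.NewResponses() // Validate requires a responses object
	doc.AddOperation("/health", http.MethodGet, op)

	if err := doc.Validate(context.Background()); err != nil {
		panic(err)
	}
	fmt.Println("document is valid")
}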
+ + wrap = func(e error) error { return fmt.Errorf("invalid components: %w", e) } + if v := doc.Components; v != nil { + if err := v.Validate(ctx); err != nil { + return wrap(err) + } + } + + wrap = func(e error) error { return fmt.Errorf("invalid info: %w", e) } + if v := doc.Info; v != nil { + if err := v.Validate(ctx); err != nil { + return wrap(err) + } + } else { + return wrap(errors.New("must be an object")) + } + + wrap = func(e error) error { return fmt.Errorf("invalid paths: %w", e) } + if v := doc.Paths; v != nil { + if err := v.Validate(ctx); err != nil { + return wrap(err) + } + } else { + return wrap(errors.New("must be an object")) + } + + wrap = func(e error) error { return fmt.Errorf("invalid security: %w", e) } + if v := doc.Security; v != nil { + if err := v.Validate(ctx); err != nil { + return wrap(err) + } + } + + wrap = func(e error) error { return fmt.Errorf("invalid servers: %w", e) } + if v := doc.Servers; v != nil { + if err := v.Validate(ctx); err != nil { + return wrap(err) + } + } + + wrap = func(e error) error { return fmt.Errorf("invalid tags: %w", e) } + if v := doc.Tags; v != nil { + if err := v.Validate(ctx); err != nil { + return wrap(err) + } + } + + wrap = func(e error) error { return fmt.Errorf("invalid external docs: %w", e) } + if v := doc.ExternalDocs; v != nil { + if err := v.Validate(ctx); err != nil { + return wrap(err) + } + } + + return validateExtensions(ctx, doc.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/operation.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/operation.go new file mode 100644 index 00000000..645c0805 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/operation.go @@ -0,0 +1,216 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "strconv" + + "github.com/go-openapi/jsonpointer" +) + +// Operation represents "operation" specified by" OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#operation-object +type Operation struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + // Optional tags for documentation. + Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` + + // Optional short summary. + Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` + + // Optional description. Should use CommonMark syntax. + Description string `json:"description,omitempty" yaml:"description,omitempty"` + + // Optional operation ID. + OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"` + + // Optional parameters. + Parameters Parameters `json:"parameters,omitempty" yaml:"parameters,omitempty"` + + // Optional body parameter. + RequestBody *RequestBodyRef `json:"requestBody,omitempty" yaml:"requestBody,omitempty"` + + // Responses. + Responses Responses `json:"responses" yaml:"responses"` // Required + + // Optional callbacks + Callbacks Callbacks `json:"callbacks,omitempty" yaml:"callbacks,omitempty"` + + Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"` + + // Optional security requirements that overrides top-level security. + Security *SecurityRequirements `json:"security,omitempty" yaml:"security,omitempty"` + + // Optional servers that overrides top-level servers. 
+ Servers *Servers `json:"servers,omitempty" yaml:"servers,omitempty"` + + ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` +} + +var _ jsonpointer.JSONPointable = (*Operation)(nil) + +func NewOperation() *Operation { + return &Operation{} +} + +// MarshalJSON returns the JSON encoding of Operation. +func (operation Operation) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 12+len(operation.Extensions)) + for k, v := range operation.Extensions { + m[k] = v + } + if x := operation.Tags; len(x) != 0 { + m["tags"] = x + } + if x := operation.Summary; x != "" { + m["summary"] = x + } + if x := operation.Description; x != "" { + m["description"] = x + } + if x := operation.OperationID; x != "" { + m["operationId"] = x + } + if x := operation.Parameters; len(x) != 0 { + m["parameters"] = x + } + if x := operation.RequestBody; x != nil { + m["requestBody"] = x + } + m["responses"] = operation.Responses + if x := operation.Callbacks; len(x) != 0 { + m["callbacks"] = x + } + if x := operation.Deprecated; x { + m["deprecated"] = x + } + if x := operation.Security; x != nil { + m["security"] = x + } + if x := operation.Servers; x != nil { + m["servers"] = x + } + if x := operation.ExternalDocs; x != nil { + m["externalDocs"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Operation to a copy of data. +func (operation *Operation) UnmarshalJSON(data []byte) error { + type OperationBis Operation + var x OperationBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "tags") + delete(x.Extensions, "summary") + delete(x.Extensions, "description") + delete(x.Extensions, "operationId") + delete(x.Extensions, "parameters") + delete(x.Extensions, "requestBody") + delete(x.Extensions, "responses") + delete(x.Extensions, "callbacks") + delete(x.Extensions, "deprecated") + delete(x.Extensions, "security") + delete(x.Extensions, "servers") + delete(x.Extensions, "externalDocs") + *operation = Operation(x) + return nil +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (operation Operation) JSONLookup(token string) (interface{}, error) { + switch token { + case "requestBody": + if operation.RequestBody != nil { + if operation.RequestBody.Ref != "" { + return &Ref{Ref: operation.RequestBody.Ref}, nil + } + return operation.RequestBody.Value, nil + } + case "tags": + return operation.Tags, nil + case "summary": + return operation.Summary, nil + case "description": + return operation.Description, nil + case "operationID": + return operation.OperationID, nil + case "parameters": + return operation.Parameters, nil + case "responses": + return operation.Responses, nil + case "callbacks": + return operation.Callbacks, nil + case "deprecated": + return operation.Deprecated, nil + case "security": + return operation.Security, nil + case "servers": + return operation.Servers, nil + case "externalDocs": + return operation.ExternalDocs, nil + } + + v, _, err := jsonpointer.GetForToken(operation.Extensions, token) + return v, err +} + +func (operation *Operation) AddParameter(p *Parameter) { + operation.Parameters = append(operation.Parameters, &ParameterRef{ + Value: p, + }) +} + +func (operation *Operation) AddResponse(status int, response *Response) { + responses := operation.Responses + if responses == nil { + responses = NewResponses() + operation.Responses = responses + } + code := "default" + if status != 0 { + code = 
strconv.FormatInt(int64(status), 10) + } + responses[code] = &ResponseRef{ + Value: response, + } +} + +// Validate returns an error if Operation does not comply with the OpenAPI spec. +func (operation *Operation) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if v := operation.Parameters; v != nil { + if err := v.Validate(ctx); err != nil { + return err + } + } + + if v := operation.RequestBody; v != nil { + if err := v.Validate(ctx); err != nil { + return err + } + } + + if v := operation.Responses; v != nil { + if err := v.Validate(ctx); err != nil { + return err + } + } else { + return errors.New("value of responses must be an object") + } + + if v := operation.ExternalDocs; v != nil { + if err := v.Validate(ctx); err != nil { + return fmt.Errorf("invalid external docs: %w", err) + } + } + + return validateExtensions(ctx, operation.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go new file mode 100644 index 00000000..ec1893e9 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go @@ -0,0 +1,418 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sort" + "strconv" + + "github.com/go-openapi/jsonpointer" +) + +type ParametersMap map[string]*ParameterRef + +var _ jsonpointer.JSONPointable = (*ParametersMap)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (p ParametersMap) JSONLookup(token string) (interface{}, error) { + ref, ok := p[token] + if ref == nil || ok == false { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +// Parameters is specified by OpenAPI/Swagger 3.0 standard. +type Parameters []*ParameterRef + +var _ jsonpointer.JSONPointable = (*Parameters)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (p Parameters) JSONLookup(token string) (interface{}, error) { + index, err := strconv.Atoi(token) + if err != nil { + return nil, err + } + + if index < 0 || index >= len(p) { + return nil, fmt.Errorf("index %d out of bounds of array of length %d", index, len(p)) + } + + ref := p[index] + + if ref != nil && ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +func NewParameters() Parameters { + return make(Parameters, 0, 4) +} + +func (parameters Parameters) GetByInAndName(in string, name string) *Parameter { + for _, item := range parameters { + if v := item.Value; v != nil { + if v.Name == name && v.In == in { + return v + } + } + } + return nil +} + +// Validate returns an error if Parameters does not comply with the OpenAPI spec. +func (parameters Parameters) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + dupes := make(map[string]struct{}) + for _, parameterRef := range parameters { + if v := parameterRef.Value; v != nil { + key := v.In + ":" + v.Name + if _, ok := dupes[key]; ok { + return fmt.Errorf("more than one %q parameter has name %q", v.In, v.Name) + } + dupes[key] = struct{}{} + } + + if err := parameterRef.Validate(ctx); err != nil { + return err + } + } + return nil +} + +// Parameter is specified by OpenAPI/Swagger 3.0 standard. 
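// Consumer-side sketch of assembling an Operation with the helpers above. Identifiers and
// descriptions are illustrative; per AddResponse above, a status of 0 is stored under "default".
package example

import "github.com/getkin/kin-openapi/openapi3"

func listOperation() *openapi3.Operation {
	op := openapi3.NewOperation()
	op.OperationID = "listItems"
	op.AddParameter(openapi3.NewQueryParameter("limit").WithSchema(openapi3.NewIntegerSchema()))
	op.AddResponse(200, openapi3.NewResponse().WithDescription("OK"))
	op.AddResponse(0, openapi3.NewResponse().WithDescription("unexpected error")) // stored as "default"
	return op
}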
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#parameter-object +type Parameter struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Name string `json:"name,omitempty" yaml:"name,omitempty"` + In string `json:"in,omitempty" yaml:"in,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Style string `json:"style,omitempty" yaml:"style,omitempty"` + Explode *bool `json:"explode,omitempty" yaml:"explode,omitempty"` + AllowEmptyValue bool `json:"allowEmptyValue,omitempty" yaml:"allowEmptyValue,omitempty"` + AllowReserved bool `json:"allowReserved,omitempty" yaml:"allowReserved,omitempty"` + Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"` + Required bool `json:"required,omitempty" yaml:"required,omitempty"` + Schema *SchemaRef `json:"schema,omitempty" yaml:"schema,omitempty"` + Example interface{} `json:"example,omitempty" yaml:"example,omitempty"` + Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"` + Content Content `json:"content,omitempty" yaml:"content,omitempty"` +} + +var _ jsonpointer.JSONPointable = (*Parameter)(nil) + +const ( + ParameterInPath = "path" + ParameterInQuery = "query" + ParameterInHeader = "header" + ParameterInCookie = "cookie" +) + +func NewPathParameter(name string) *Parameter { + return &Parameter{ + Name: name, + In: ParameterInPath, + Required: true, + } +} + +func NewQueryParameter(name string) *Parameter { + return &Parameter{ + Name: name, + In: ParameterInQuery, + } +} + +func NewHeaderParameter(name string) *Parameter { + return &Parameter{ + Name: name, + In: ParameterInHeader, + } +} + +func NewCookieParameter(name string) *Parameter { + return &Parameter{ + Name: name, + In: ParameterInCookie, + } +} + +func (parameter *Parameter) WithDescription(value string) *Parameter { + parameter.Description = value + return parameter +} + +func (parameter *Parameter) WithRequired(value bool) *Parameter { + parameter.Required = value + return parameter +} + +func (parameter *Parameter) WithSchema(value *Schema) *Parameter { + if value == nil { + parameter.Schema = nil + } else { + parameter.Schema = &SchemaRef{ + Value: value, + } + } + return parameter +} + +// MarshalJSON returns the JSON encoding of Parameter. +func (parameter Parameter) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 13+len(parameter.Extensions)) + for k, v := range parameter.Extensions { + m[k] = v + } + + if x := parameter.Name; x != "" { + m["name"] = x + } + if x := parameter.In; x != "" { + m["in"] = x + } + if x := parameter.Description; x != "" { + m["description"] = x + } + if x := parameter.Style; x != "" { + m["style"] = x + } + if x := parameter.Explode; x != nil { + m["explode"] = x + } + if x := parameter.AllowEmptyValue; x { + m["allowEmptyValue"] = x + } + if x := parameter.AllowReserved; x { + m["allowReserved"] = x + } + if x := parameter.Deprecated; x { + m["deprecated"] = x + } + if x := parameter.Required; x { + m["required"] = x + } + if x := parameter.Schema; x != nil { + m["schema"] = x + } + if x := parameter.Example; x != nil { + m["example"] = x + } + if x := parameter.Examples; len(x) != 0 { + m["examples"] = x + } + if x := parameter.Content; len(x) != 0 { + m["content"] = x + } + + return json.Marshal(m) +} + +// UnmarshalJSON sets Parameter to a copy of data. 
+func (parameter *Parameter) UnmarshalJSON(data []byte) error { + type ParameterBis Parameter + var x ParameterBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + + delete(x.Extensions, "name") + delete(x.Extensions, "in") + delete(x.Extensions, "description") + delete(x.Extensions, "style") + delete(x.Extensions, "explode") + delete(x.Extensions, "allowEmptyValue") + delete(x.Extensions, "allowReserved") + delete(x.Extensions, "deprecated") + delete(x.Extensions, "required") + delete(x.Extensions, "schema") + delete(x.Extensions, "example") + delete(x.Extensions, "examples") + delete(x.Extensions, "content") + + *parameter = Parameter(x) + return nil +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (parameter Parameter) JSONLookup(token string) (interface{}, error) { + switch token { + case "schema": + if parameter.Schema != nil { + if parameter.Schema.Ref != "" { + return &Ref{Ref: parameter.Schema.Ref}, nil + } + return parameter.Schema.Value, nil + } + case "name": + return parameter.Name, nil + case "in": + return parameter.In, nil + case "description": + return parameter.Description, nil + case "style": + return parameter.Style, nil + case "explode": + return parameter.Explode, nil + case "allowEmptyValue": + return parameter.AllowEmptyValue, nil + case "allowReserved": + return parameter.AllowReserved, nil + case "deprecated": + return parameter.Deprecated, nil + case "required": + return parameter.Required, nil + case "example": + return parameter.Example, nil + case "examples": + return parameter.Examples, nil + case "content": + return parameter.Content, nil + } + + v, _, err := jsonpointer.GetForToken(parameter.Extensions, token) + return v, err +} + +// SerializationMethod returns a parameter's serialization method. +// When a parameter's serialization method is not defined the method returns +// the default serialization method corresponding to a parameter's location. +func (parameter *Parameter) SerializationMethod() (*SerializationMethod, error) { + switch parameter.In { + case ParameterInPath, ParameterInHeader: + style := parameter.Style + if style == "" { + style = SerializationSimple + } + explode := false + if parameter.Explode != nil { + explode = *parameter.Explode + } + return &SerializationMethod{Style: style, Explode: explode}, nil + case ParameterInQuery, ParameterInCookie: + style := parameter.Style + if style == "" { + style = SerializationForm + } + explode := true + if parameter.Explode != nil { + explode = *parameter.Explode + } + return &SerializationMethod{Style: style, Explode: explode}, nil + default: + return nil, fmt.Errorf("unexpected parameter's 'in': %q", parameter.In) + } +} + +// Validate returns an error if Parameter does not comply with the OpenAPI spec. +func (parameter *Parameter) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if parameter.Name == "" { + return errors.New("parameter name can't be blank") + } + in := parameter.In + switch in { + case + ParameterInPath, + ParameterInQuery, + ParameterInHeader, + ParameterInCookie: + default: + return fmt.Errorf("parameter can't have 'in' value %q", parameter.In) + } + + if in == ParameterInPath && !parameter.Required { + return fmt.Errorf("path parameter %q must be required", parameter.Name) + } + + // Validate a parameter's serialization method. 
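// Consumer-side sketch of the parameter validation rules above: a path parameter must be
// required (NewPathParameter sets this), and exactly one of schema and content must be set.
// The parameter names are illustrative.
package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	ctx := context.Background()

	ok := openapi3.NewPathParameter("id").WithSchema(openapi3.NewStringSchema())
	fmt.Println(ok.Validate(ctx)) // <nil>

	// A hand-built path parameter without Required set fails validation.
	bad := &openapi3.Parameter{
		Name:   "id",
		In:     openapi3.ParameterInPath,
		Schema: &openapi3.SchemaRef{Value: openapi3.NewStringSchema()},
	}
	fmt.Println(bad.Validate(ctx)) // path parameter "id" must be required
}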
+ sm, err := parameter.SerializationMethod() + if err != nil { + return err + } + var smSupported bool + switch { + case parameter.In == ParameterInPath && sm.Style == SerializationSimple && !sm.Explode, + parameter.In == ParameterInPath && sm.Style == SerializationSimple && sm.Explode, + parameter.In == ParameterInPath && sm.Style == SerializationLabel && !sm.Explode, + parameter.In == ParameterInPath && sm.Style == SerializationLabel && sm.Explode, + parameter.In == ParameterInPath && sm.Style == SerializationMatrix && !sm.Explode, + parameter.In == ParameterInPath && sm.Style == SerializationMatrix && sm.Explode, + + parameter.In == ParameterInQuery && sm.Style == SerializationForm && sm.Explode, + parameter.In == ParameterInQuery && sm.Style == SerializationForm && !sm.Explode, + parameter.In == ParameterInQuery && sm.Style == SerializationSpaceDelimited && sm.Explode, + parameter.In == ParameterInQuery && sm.Style == SerializationSpaceDelimited && !sm.Explode, + parameter.In == ParameterInQuery && sm.Style == SerializationPipeDelimited && sm.Explode, + parameter.In == ParameterInQuery && sm.Style == SerializationPipeDelimited && !sm.Explode, + parameter.In == ParameterInQuery && sm.Style == SerializationDeepObject && sm.Explode, + + parameter.In == ParameterInHeader && sm.Style == SerializationSimple && !sm.Explode, + parameter.In == ParameterInHeader && sm.Style == SerializationSimple && sm.Explode, + + parameter.In == ParameterInCookie && sm.Style == SerializationForm && !sm.Explode, + parameter.In == ParameterInCookie && sm.Style == SerializationForm && sm.Explode: + smSupported = true + } + if !smSupported { + e := fmt.Errorf("serialization method with style=%q and explode=%v is not supported by a %s parameter", sm.Style, sm.Explode, in) + return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, e) + } + + if (parameter.Schema == nil) == (parameter.Content == nil) { + e := errors.New("parameter must contain exactly one of content and schema") + return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, e) + } + + if content := parameter.Content; content != nil { + if err := content.Validate(ctx); err != nil { + return fmt.Errorf("parameter %q content is invalid: %w", parameter.Name, err) + } + } + + if schema := parameter.Schema; schema != nil { + if err := schema.Validate(ctx); err != nil { + return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, err) + } + if parameter.Example != nil && parameter.Examples != nil { + return fmt.Errorf("parameter %q example and examples are mutually exclusive", parameter.Name) + } + + if vo := getValidationOptions(ctx); vo.examplesValidationDisabled { + return nil + } + if example := parameter.Example; example != nil { + if err := validateExampleValue(ctx, example, schema.Value); err != nil { + return fmt.Errorf("invalid example: %w", err) + } + } else if examples := parameter.Examples; examples != nil { + names := make([]string, 0, len(examples)) + for name := range examples { + names = append(names, name) + } + sort.Strings(names) + for _, k := range names { + v := examples[k] + if err := v.Validate(ctx); err != nil { + return fmt.Errorf("%s: %w", k, err) + } + if err := validateExampleValue(ctx, v.Value.Value, schema.Value); err != nil { + return fmt.Errorf("%s: %w", k, err) + } + } + } + } + + return validateExtensions(ctx, parameter.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go 
b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go new file mode 100644 index 00000000..fab75d93 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go @@ -0,0 +1,211 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "sort" +) + +// PathItem is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#path-item-object +type PathItem struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"` + Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Connect *Operation `json:"connect,omitempty" yaml:"connect,omitempty"` + Delete *Operation `json:"delete,omitempty" yaml:"delete,omitempty"` + Get *Operation `json:"get,omitempty" yaml:"get,omitempty"` + Head *Operation `json:"head,omitempty" yaml:"head,omitempty"` + Options *Operation `json:"options,omitempty" yaml:"options,omitempty"` + Patch *Operation `json:"patch,omitempty" yaml:"patch,omitempty"` + Post *Operation `json:"post,omitempty" yaml:"post,omitempty"` + Put *Operation `json:"put,omitempty" yaml:"put,omitempty"` + Trace *Operation `json:"trace,omitempty" yaml:"trace,omitempty"` + Servers Servers `json:"servers,omitempty" yaml:"servers,omitempty"` + Parameters Parameters `json:"parameters,omitempty" yaml:"parameters,omitempty"` +} + +// MarshalJSON returns the JSON encoding of PathItem. +func (pathItem PathItem) MarshalJSON() ([]byte, error) { + if ref := pathItem.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + + m := make(map[string]interface{}, 13+len(pathItem.Extensions)) + for k, v := range pathItem.Extensions { + m[k] = v + } + if x := pathItem.Summary; x != "" { + m["summary"] = x + } + if x := pathItem.Description; x != "" { + m["description"] = x + } + if x := pathItem.Connect; x != nil { + m["connect"] = x + } + if x := pathItem.Delete; x != nil { + m["delete"] = x + } + if x := pathItem.Get; x != nil { + m["get"] = x + } + if x := pathItem.Head; x != nil { + m["head"] = x + } + if x := pathItem.Options; x != nil { + m["options"] = x + } + if x := pathItem.Patch; x != nil { + m["patch"] = x + } + if x := pathItem.Post; x != nil { + m["post"] = x + } + if x := pathItem.Put; x != nil { + m["put"] = x + } + if x := pathItem.Trace; x != nil { + m["trace"] = x + } + if x := pathItem.Servers; len(x) != 0 { + m["servers"] = x + } + if x := pathItem.Parameters; len(x) != 0 { + m["parameters"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets PathItem to a copy of data. 
+func (pathItem *PathItem) UnmarshalJSON(data []byte) error { + type PathItemBis PathItem + var x PathItemBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "$ref") + delete(x.Extensions, "summary") + delete(x.Extensions, "description") + delete(x.Extensions, "connect") + delete(x.Extensions, "delete") + delete(x.Extensions, "get") + delete(x.Extensions, "head") + delete(x.Extensions, "options") + delete(x.Extensions, "patch") + delete(x.Extensions, "post") + delete(x.Extensions, "put") + delete(x.Extensions, "trace") + delete(x.Extensions, "servers") + delete(x.Extensions, "parameters") + *pathItem = PathItem(x) + return nil +} + +func (pathItem *PathItem) Operations() map[string]*Operation { + operations := make(map[string]*Operation) + if v := pathItem.Connect; v != nil { + operations[http.MethodConnect] = v + } + if v := pathItem.Delete; v != nil { + operations[http.MethodDelete] = v + } + if v := pathItem.Get; v != nil { + operations[http.MethodGet] = v + } + if v := pathItem.Head; v != nil { + operations[http.MethodHead] = v + } + if v := pathItem.Options; v != nil { + operations[http.MethodOptions] = v + } + if v := pathItem.Patch; v != nil { + operations[http.MethodPatch] = v + } + if v := pathItem.Post; v != nil { + operations[http.MethodPost] = v + } + if v := pathItem.Put; v != nil { + operations[http.MethodPut] = v + } + if v := pathItem.Trace; v != nil { + operations[http.MethodTrace] = v + } + return operations +} + +func (pathItem *PathItem) GetOperation(method string) *Operation { + switch method { + case http.MethodConnect: + return pathItem.Connect + case http.MethodDelete: + return pathItem.Delete + case http.MethodGet: + return pathItem.Get + case http.MethodHead: + return pathItem.Head + case http.MethodOptions: + return pathItem.Options + case http.MethodPatch: + return pathItem.Patch + case http.MethodPost: + return pathItem.Post + case http.MethodPut: + return pathItem.Put + case http.MethodTrace: + return pathItem.Trace + default: + panic(fmt.Errorf("unsupported HTTP method %q", method)) + } +} + +func (pathItem *PathItem) SetOperation(method string, operation *Operation) { + switch method { + case http.MethodConnect: + pathItem.Connect = operation + case http.MethodDelete: + pathItem.Delete = operation + case http.MethodGet: + pathItem.Get = operation + case http.MethodHead: + pathItem.Head = operation + case http.MethodOptions: + pathItem.Options = operation + case http.MethodPatch: + pathItem.Patch = operation + case http.MethodPost: + pathItem.Post = operation + case http.MethodPut: + pathItem.Put = operation + case http.MethodTrace: + pathItem.Trace = operation + default: + panic(fmt.Errorf("unsupported HTTP method %q", method)) + } +} + +// Validate returns an error if PathItem does not comply with the OpenAPI spec. +func (pathItem *PathItem) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
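// Consumer-side sketch of the PathItem helpers above: registering operations by HTTP method
// with SetOperation and walking them back via Operations(). The methods used are illustrative.
package main

import (
	"fmt"
	"net/http"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	item := &openapi3.PathItem{}
	item.SetOperation(http.MethodGet, openapi3.NewOperation())
	item.SetOperation(http.MethodPost, openapi3.NewOperation())

	for method, op := range item.Operations() {
		fmt.Println(method, op.OperationID)
	}

	// GetOperation panics on an unsupported method, so stick to the net/http constants.
	_ = item.GetOperation(http.MethodGet)
}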
+ + operations := pathItem.Operations() + + methods := make([]string, 0, len(operations)) + for method := range operations { + methods = append(methods, method) + } + sort.Strings(methods) + for _, method := range methods { + operation := operations[method] + if err := operation.Validate(ctx); err != nil { + return fmt.Errorf("invalid operation %s: %v", method, err) + } + } + + return validateExtensions(ctx, pathItem.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/paths.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/paths.go new file mode 100644 index 00000000..0986b055 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/paths.go @@ -0,0 +1,243 @@ +package openapi3 + +import ( + "context" + "fmt" + "sort" + "strings" +) + +// Paths is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#paths-object +type Paths map[string]*PathItem + +// Validate returns an error if Paths does not comply with the OpenAPI spec. +func (paths Paths) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + normalizedPaths := make(map[string]string, len(paths)) + + keys := make([]string, 0, len(paths)) + for key := range paths { + keys = append(keys, key) + } + sort.Strings(keys) + for _, path := range keys { + pathItem := paths[path] + if path == "" || path[0] != '/' { + return fmt.Errorf("path %q does not start with a forward slash (/)", path) + } + + if pathItem == nil { + pathItem = &PathItem{} + paths[path] = pathItem + } + + normalizedPath, _, varsInPath := normalizeTemplatedPath(path) + if oldPath, ok := normalizedPaths[normalizedPath]; ok { + return fmt.Errorf("conflicting paths %q and %q", path, oldPath) + } + normalizedPaths[path] = path + + var commonParams []string + for _, parameterRef := range pathItem.Parameters { + if parameterRef != nil { + if parameter := parameterRef.Value; parameter != nil && parameter.In == ParameterInPath { + commonParams = append(commonParams, parameter.Name) + } + } + } + operations := pathItem.Operations() + methods := make([]string, 0, len(operations)) + for method := range operations { + methods = append(methods, method) + } + sort.Strings(methods) + for _, method := range methods { + operation := operations[method] + var setParams []string + for _, parameterRef := range operation.Parameters { + if parameterRef != nil { + if parameter := parameterRef.Value; parameter != nil && parameter.In == ParameterInPath { + setParams = append(setParams, parameter.Name) + } + } + } + if expected := len(setParams) + len(commonParams); expected != len(varsInPath) { + expected -= len(varsInPath) + if expected < 0 { + expected *= -1 + } + missing := make(map[string]struct{}, expected) + definedParams := append(setParams, commonParams...) 
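+ // missing collects the symmetric difference between the parameters declared on the
+ // operation (plus path-level parameters) and the variables that appear in the path template.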
+ for _, name := range definedParams { + if _, ok := varsInPath[name]; !ok { + missing[name] = struct{}{} + } + } + for name := range varsInPath { + got := false + for _, othername := range definedParams { + if othername == name { + got = true + break + } + } + if !got { + missing[name] = struct{}{} + } + } + if len(missing) != 0 { + missings := make([]string, 0, len(missing)) + for name := range missing { + missings = append(missings, name) + } + return fmt.Errorf("operation %s %s must define exactly all path parameters (missing: %v)", method, path, missings) + } + } + } + + if err := pathItem.Validate(ctx); err != nil { + return fmt.Errorf("invalid path %s: %v", path, err) + } + } + + if err := paths.validateUniqueOperationIDs(); err != nil { + return err + } + + return nil +} + +// InMatchingOrder returns paths in the order they are matched against URLs. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#paths-object +// When matching URLs, concrete (non-templated) paths would be matched +// before their templated counterparts. +func (paths Paths) InMatchingOrder() []string { + // NOTE: sorting by number of variables ASC then by descending lexicographical + // order seems to be a good heuristic. + if paths == nil { + return nil + } + + vars := make(map[int][]string) + max := 0 + for path := range paths { + count := strings.Count(path, "}") + vars[count] = append(vars[count], path) + if count > max { + max = count + } + } + + ordered := make([]string, 0, len(paths)) + for c := 0; c <= max; c++ { + if ps, ok := vars[c]; ok { + sort.Sort(sort.Reverse(sort.StringSlice(ps))) + ordered = append(ordered, ps...) + } + } + return ordered +} + +// Find returns a path that matches the key. +// +// The method ignores differences in template variable names (except possible "*" suffix). +// +// For example: +// +// paths := openapi3.Paths { +// "/person/{personName}": &openapi3.PathItem{}, +// } +// pathItem := path.Find("/person/{name}") +// +// would return the correct path item. +func (paths Paths) Find(key string) *PathItem { + // Try directly access the map + pathItem := paths[key] + if pathItem != nil { + return pathItem + } + + normalizedPath, expected, _ := normalizeTemplatedPath(key) + for path, pathItem := range paths { + pathNormalized, got, _ := normalizeTemplatedPath(path) + if got == expected && pathNormalized == normalizedPath { + return pathItem + } + } + return nil +} + +func (paths Paths) validateUniqueOperationIDs() error { + operationIDs := make(map[string]string) + for urlPath, pathItem := range paths { + if pathItem == nil { + continue + } + for httpMethod, operation := range pathItem.Operations() { + if operation == nil || operation.OperationID == "" { + continue + } + endpoint := httpMethod + " " + urlPath + if endpointDup, ok := operationIDs[operation.OperationID]; ok { + if endpoint > endpointDup { // For make error message a bit more deterministic. May be useful for tests. 
+ endpoint, endpointDup = endpointDup, endpoint + } + return fmt.Errorf("operations %q and %q have the same operation id %q", + endpoint, endpointDup, operation.OperationID) + } + operationIDs[operation.OperationID] = endpoint + } + } + return nil +} + +func normalizeTemplatedPath(path string) (string, uint, map[string]struct{}) { + if strings.IndexByte(path, '{') < 0 { + return path, 0, nil + } + + var buffTpl strings.Builder + buffTpl.Grow(len(path)) + + var ( + cc rune + count uint + isVariable bool + vars = make(map[string]struct{}) + buffVar strings.Builder + ) + for i, c := range path { + if isVariable { + if c == '}' { + // End path variable + isVariable = false + + vars[buffVar.String()] = struct{}{} + buffVar = strings.Builder{} + + // First append possible '*' before this character + // The character '}' will be appended + if i > 0 && cc == '*' { + buffTpl.WriteRune(cc) + } + } else { + buffVar.WriteRune(c) + continue + } + + } else if c == '{' { + // Begin path variable + isVariable = true + + // The character '{' will be appended + count++ + } + + // Append the character + buffTpl.WriteRune(c) + cc = c + } + return buffTpl.String(), count, vars +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/ref.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/ref.go new file mode 100644 index 00000000..a937de4a --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/ref.go @@ -0,0 +1,7 @@ +package openapi3 + +// Ref is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#reference-object +type Ref struct { + Ref string `json:"$ref" yaml:"$ref"` +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/refs.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/refs.go new file mode 100644 index 00000000..15f5179d --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/refs.go @@ -0,0 +1,695 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "fmt" + "sort" + + "github.com/go-openapi/jsonpointer" + "github.com/perimeterx/marshmallow" +) + +// CallbackRef represents either a Callback or a $ref to a Callback. +// When serializing and both fields are set, Ref is preferred over Value. +type CallbackRef struct { + Ref string + Value *Callback + extra []string +} + +var _ jsonpointer.JSONPointable = (*CallbackRef)(nil) + +// MarshalYAML returns the YAML encoding of CallbackRef. +func (x CallbackRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of CallbackRef. +func (x CallbackRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return json.Marshal(x.Value) +} + +// UnmarshalJSON sets CallbackRef to a copy of data. +func (x *CallbackRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if CallbackRef does not comply with the OpenAPI spec. 
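+// Fields that appear as siblings of $ref are rejected unless they are explicitly
+// allowed through the validation options.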
+func (x *CallbackRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *CallbackRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// ExampleRef represents either a Example or a $ref to a Example. +// When serializing and both fields are set, Ref is preferred over Value. +type ExampleRef struct { + Ref string + Value *Example + extra []string +} + +var _ jsonpointer.JSONPointable = (*ExampleRef)(nil) + +// MarshalYAML returns the YAML encoding of ExampleRef. +func (x ExampleRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of ExampleRef. +func (x ExampleRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets ExampleRef to a copy of data. +func (x *ExampleRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if ExampleRef does not comply with the OpenAPI spec. +func (x *ExampleRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *ExampleRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// HeaderRef represents either a Header or a $ref to a Header. +// When serializing and both fields are set, Ref is preferred over Value. +type HeaderRef struct { + Ref string + Value *Header + extra []string +} + +var _ jsonpointer.JSONPointable = (*HeaderRef)(nil) + +// MarshalYAML returns the YAML encoding of HeaderRef. 
+func (x HeaderRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of HeaderRef. +func (x HeaderRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets HeaderRef to a copy of data. +func (x *HeaderRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if HeaderRef does not comply with the OpenAPI spec. +func (x *HeaderRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *HeaderRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// LinkRef represents either a Link or a $ref to a Link. +// When serializing and both fields are set, Ref is preferred over Value. +type LinkRef struct { + Ref string + Value *Link + extra []string +} + +var _ jsonpointer.JSONPointable = (*LinkRef)(nil) + +// MarshalYAML returns the YAML encoding of LinkRef. +func (x LinkRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of LinkRef. +func (x LinkRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets LinkRef to a copy of data. +func (x *LinkRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if LinkRef does not comply with the OpenAPI spec. +func (x *LinkRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
+ if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *LinkRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// ParameterRef represents either a Parameter or a $ref to a Parameter. +// When serializing and both fields are set, Ref is preferred over Value. +type ParameterRef struct { + Ref string + Value *Parameter + extra []string +} + +var _ jsonpointer.JSONPointable = (*ParameterRef)(nil) + +// MarshalYAML returns the YAML encoding of ParameterRef. +func (x ParameterRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of ParameterRef. +func (x ParameterRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets ParameterRef to a copy of data. +func (x *ParameterRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if ParameterRef does not comply with the OpenAPI spec. +func (x *ParameterRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *ParameterRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// RequestBodyRef represents either a RequestBody or a $ref to a RequestBody. +// When serializing and both fields are set, Ref is preferred over Value. +type RequestBodyRef struct { + Ref string + Value *RequestBody + extra []string +} + +var _ jsonpointer.JSONPointable = (*RequestBodyRef)(nil) + +// MarshalYAML returns the YAML encoding of RequestBodyRef. 
+func (x RequestBodyRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of RequestBodyRef. +func (x RequestBodyRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets RequestBodyRef to a copy of data. +func (x *RequestBodyRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if RequestBodyRef does not comply with the OpenAPI spec. +func (x *RequestBodyRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *RequestBodyRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// ResponseRef represents either a Response or a $ref to a Response. +// When serializing and both fields are set, Ref is preferred over Value. +type ResponseRef struct { + Ref string + Value *Response + extra []string +} + +var _ jsonpointer.JSONPointable = (*ResponseRef)(nil) + +// MarshalYAML returns the YAML encoding of ResponseRef. +func (x ResponseRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of ResponseRef. +func (x ResponseRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets ResponseRef to a copy of data. +func (x *ResponseRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if ResponseRef does not comply with the OpenAPI spec. +func (x *ResponseRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
+ if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *ResponseRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// SchemaRef represents either a Schema or a $ref to a Schema. +// When serializing and both fields are set, Ref is preferred over Value. +type SchemaRef struct { + Ref string + Value *Schema + extra []string +} + +var _ jsonpointer.JSONPointable = (*SchemaRef)(nil) + +// MarshalYAML returns the YAML encoding of SchemaRef. +func (x SchemaRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of SchemaRef. +func (x SchemaRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets SchemaRef to a copy of data. +func (x *SchemaRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if SchemaRef does not comply with the OpenAPI spec. +func (x *SchemaRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *SchemaRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} + +// SecuritySchemeRef represents either a SecurityScheme or a $ref to a SecurityScheme. +// When serializing and both fields are set, Ref is preferred over Value. +type SecuritySchemeRef struct { + Ref string + Value *SecurityScheme + extra []string +} + +var _ jsonpointer.JSONPointable = (*SecuritySchemeRef)(nil) + +// MarshalYAML returns the YAML encoding of SecuritySchemeRef. 
+func (x SecuritySchemeRef) MarshalYAML() (interface{}, error) { + if ref := x.Ref; ref != "" { + return &Ref{Ref: ref}, nil + } + return x.Value, nil +} + +// MarshalJSON returns the JSON encoding of SecuritySchemeRef. +func (x SecuritySchemeRef) MarshalJSON() ([]byte, error) { + if ref := x.Ref; ref != "" { + return json.Marshal(Ref{Ref: ref}) + } + return x.Value.MarshalJSON() +} + +// UnmarshalJSON sets SecuritySchemeRef to a copy of data. +func (x *SecuritySchemeRef) UnmarshalJSON(data []byte) error { + var refOnly Ref + if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { + x.Ref = refOnly.Ref + if len(extra) != 0 { + x.extra = make([]string, 0, len(extra)) + for key := range extra { + x.extra = append(x.extra, key) + } + sort.Strings(x.extra) + } + return nil + } + return json.Unmarshal(data, &x.Value) +} + +// Validate returns an error if SecuritySchemeRef does not comply with the OpenAPI spec. +func (x *SecuritySchemeRef) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + if extra := x.extra; len(extra) != 0 { + extras := make([]string, 0, len(extra)) + allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed + if allowed == nil { + allowed = make(map[string]struct{}, 0) + } + for _, ex := range extra { + if _, ok := allowed[ex]; !ok { + extras = append(extras, ex) + } + } + if len(extras) != 0 { + return fmt.Errorf("extra sibling fields: %+v", extras) + } + } + if v := x.Value; v != nil { + return v.Validate(ctx) + } + return foundUnresolvedRef(x.Ref) +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (x *SecuritySchemeRef) JSONLookup(token string) (interface{}, error) { + if token == "$ref" { + return x.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(x.Value, token) + return ptr, err +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go new file mode 100644 index 00000000..de8919f4 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go @@ -0,0 +1,146 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + + "github.com/go-openapi/jsonpointer" +) + +type RequestBodies map[string]*RequestBodyRef + +var _ jsonpointer.JSONPointable = (*RequestBodyRef)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (r RequestBodies) JSONLookup(token string) (interface{}, error) { + ref, ok := r[token] + if ok == false { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref != nil && ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +// RequestBody is specified by OpenAPI/Swagger 3.0 standard. 
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#request-body-object +type RequestBody struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Required bool `json:"required,omitempty" yaml:"required,omitempty"` + Content Content `json:"content" yaml:"content"` +} + +func NewRequestBody() *RequestBody { + return &RequestBody{} +} + +func (requestBody *RequestBody) WithDescription(value string) *RequestBody { + requestBody.Description = value + return requestBody +} + +func (requestBody *RequestBody) WithRequired(value bool) *RequestBody { + requestBody.Required = value + return requestBody +} + +func (requestBody *RequestBody) WithContent(content Content) *RequestBody { + requestBody.Content = content + return requestBody +} + +func (requestBody *RequestBody) WithSchemaRef(value *SchemaRef, consumes []string) *RequestBody { + requestBody.Content = NewContentWithSchemaRef(value, consumes) + return requestBody +} + +func (requestBody *RequestBody) WithSchema(value *Schema, consumes []string) *RequestBody { + requestBody.Content = NewContentWithSchema(value, consumes) + return requestBody +} + +func (requestBody *RequestBody) WithJSONSchemaRef(value *SchemaRef) *RequestBody { + requestBody.Content = NewContentWithJSONSchemaRef(value) + return requestBody +} + +func (requestBody *RequestBody) WithJSONSchema(value *Schema) *RequestBody { + requestBody.Content = NewContentWithJSONSchema(value) + return requestBody +} + +func (requestBody *RequestBody) WithFormDataSchemaRef(value *SchemaRef) *RequestBody { + requestBody.Content = NewContentWithFormDataSchemaRef(value) + return requestBody +} + +func (requestBody *RequestBody) WithFormDataSchema(value *Schema) *RequestBody { + requestBody.Content = NewContentWithFormDataSchema(value) + return requestBody +} + +func (requestBody *RequestBody) GetMediaType(mediaType string) *MediaType { + m := requestBody.Content + if m == nil { + return nil + } + return m[mediaType] +} + +// MarshalJSON returns the JSON encoding of RequestBody. +func (requestBody RequestBody) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 3+len(requestBody.Extensions)) + for k, v := range requestBody.Extensions { + m[k] = v + } + if x := requestBody.Description; x != "" { + m["description"] = requestBody.Description + } + if x := requestBody.Required; x { + m["required"] = x + } + if x := requestBody.Content; true { + m["content"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets RequestBody to a copy of data. +func (requestBody *RequestBody) UnmarshalJSON(data []byte) error { + type RequestBodyBis RequestBody + var x RequestBodyBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "description") + delete(x.Extensions, "required") + delete(x.Extensions, "content") + *requestBody = RequestBody(x) + return nil +} + +// Validate returns an error if RequestBody does not comply with the OpenAPI spec. +func (requestBody *RequestBody) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
+ + if requestBody.Content == nil { + return errors.New("content of the request body is required") + } + + if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled { + vo.examplesValidationAsReq, vo.examplesValidationAsRes = true, false + } + + if err := requestBody.Content.Validate(ctx); err != nil { + return err + } + + return validateExtensions(ctx, requestBody.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/response.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/response.go new file mode 100644 index 00000000..b85c9145 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/response.go @@ -0,0 +1,183 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sort" + "strconv" + + "github.com/go-openapi/jsonpointer" +) + +// Responses is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#responses-object +type Responses map[string]*ResponseRef + +var _ jsonpointer.JSONPointable = (*Responses)(nil) + +func NewResponses() Responses { + r := make(Responses) + r["default"] = &ResponseRef{Value: NewResponse().WithDescription("")} + return r +} + +func (responses Responses) Default() *ResponseRef { + return responses["default"] +} + +func (responses Responses) Get(status int) *ResponseRef { + return responses[strconv.FormatInt(int64(status), 10)] +} + +// Validate returns an error if Responses does not comply with the OpenAPI spec. +func (responses Responses) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if len(responses) == 0 { + return errors.New("the responses object MUST contain at least one response code") + } + + keys := make([]string, 0, len(responses)) + for key := range responses { + keys = append(keys, key) + } + sort.Strings(keys) + for _, key := range keys { + v := responses[key] + if err := v.Validate(ctx); err != nil { + return err + } + } + return nil +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (responses Responses) JSONLookup(token string) (interface{}, error) { + ref, ok := responses[token] + if ok == false { + return nil, fmt.Errorf("invalid token reference: %q", token) + } + + if ref != nil && ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +// Response is specified by OpenAPI/Swagger 3.0 standard. 
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#response-object +type Response struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Description *string `json:"description,omitempty" yaml:"description,omitempty"` + Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"` + Content Content `json:"content,omitempty" yaml:"content,omitempty"` + Links Links `json:"links,omitempty" yaml:"links,omitempty"` +} + +func NewResponse() *Response { + return &Response{} +} + +func (response *Response) WithDescription(value string) *Response { + response.Description = &value + return response +} + +func (response *Response) WithContent(content Content) *Response { + response.Content = content + return response +} + +func (response *Response) WithJSONSchema(schema *Schema) *Response { + response.Content = NewContentWithJSONSchema(schema) + return response +} + +func (response *Response) WithJSONSchemaRef(schema *SchemaRef) *Response { + response.Content = NewContentWithJSONSchemaRef(schema) + return response +} + +// MarshalJSON returns the JSON encoding of Response. +func (response Response) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 4+len(response.Extensions)) + for k, v := range response.Extensions { + m[k] = v + } + if x := response.Description; x != nil { + m["description"] = x + } + if x := response.Headers; len(x) != 0 { + m["headers"] = x + } + if x := response.Content; len(x) != 0 { + m["content"] = x + } + if x := response.Links; len(x) != 0 { + m["links"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Response to a copy of data. +func (response *Response) UnmarshalJSON(data []byte) error { + type ResponseBis Response + var x ResponseBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "description") + delete(x.Extensions, "headers") + delete(x.Extensions, "content") + delete(x.Extensions, "links") + *response = Response(x) + return nil +} + +// Validate returns an error if Response does not comply with the OpenAPI spec. +func (response *Response) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
+ + if response.Description == nil { + return errors.New("a short description of the response is required") + } + if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled { + vo.examplesValidationAsReq, vo.examplesValidationAsRes = false, true + } + + if content := response.Content; content != nil { + if err := content.Validate(ctx); err != nil { + return err + } + } + + headers := make([]string, 0, len(response.Headers)) + for name := range response.Headers { + headers = append(headers, name) + } + sort.Strings(headers) + for _, name := range headers { + header := response.Headers[name] + if err := header.Validate(ctx); err != nil { + return err + } + } + + links := make([]string, 0, len(response.Links)) + for name := range response.Links { + links = append(links, name) + } + sort.Strings(links) + for _, name := range links { + link := response.Links[name] + if err := link.Validate(ctx); err != nil { + return err + } + } + + return validateExtensions(ctx, response.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema.go new file mode 100644 index 00000000..4bfbca0b --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema.go @@ -0,0 +1,2121 @@ +package openapi3 + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "math" + "math/big" + "reflect" + "regexp" + "sort" + "strconv" + "strings" + "unicode/utf16" + + "github.com/go-openapi/jsonpointer" + "github.com/mohae/deepcopy" +) + +const ( + TypeArray = "array" + TypeBoolean = "boolean" + TypeInteger = "integer" + TypeNumber = "number" + TypeObject = "object" + TypeString = "string" + + // constants for integer formats + formatMinInt32 = float64(math.MinInt32) + formatMaxInt32 = float64(math.MaxInt32) + formatMinInt64 = float64(math.MinInt64) + formatMaxInt64 = float64(math.MaxInt64) +) + +var ( + // SchemaErrorDetailsDisabled disables printing of details about schema errors. + SchemaErrorDetailsDisabled = false + + errSchema = errors.New("input does not match the schema") + + // ErrOneOfConflict is the SchemaError Origin when data matches more than one oneOf schema + ErrOneOfConflict = errors.New("input matches more than one oneOf schemas") + + // ErrSchemaInputNaN may be returned when validating a number + ErrSchemaInputNaN = errors.New("floating point NaN is not allowed") + // ErrSchemaInputInf may be returned when validating a number + ErrSchemaInputInf = errors.New("floating point Inf is not allowed") +) + +// Float64Ptr is a helper for defining OpenAPI schemas. +func Float64Ptr(value float64) *float64 { + return &value +} + +// BoolPtr is a helper for defining OpenAPI schemas. +func BoolPtr(value bool) *bool { + return &value +} + +// Int64Ptr is a helper for defining OpenAPI schemas. +func Int64Ptr(value int64) *int64 { + return &value +} + +// Uint64Ptr is a helper for defining OpenAPI schemas. 
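+// For example:
+//
+//	s := openapi3.Schema{Type: openapi3.TypeString, MaxLength: openapi3.Uint64Ptr(64)}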
+func Uint64Ptr(value uint64) *uint64 { + return &value +} + +// NewSchemaRef simply builds a SchemaRef +func NewSchemaRef(ref string, value *Schema) *SchemaRef { + return &SchemaRef{ + Ref: ref, + Value: value, + } +} + +type Schemas map[string]*SchemaRef + +var _ jsonpointer.JSONPointable = (*Schemas)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (s Schemas) JSONLookup(token string) (interface{}, error) { + ref, ok := s[token] + if ref == nil || ok == false { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +type SchemaRefs []*SchemaRef + +var _ jsonpointer.JSONPointable = (*SchemaRefs)(nil) + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (s SchemaRefs) JSONLookup(token string) (interface{}, error) { + i, err := strconv.ParseUint(token, 10, 64) + if err != nil { + return nil, err + } + + if i >= uint64(len(s)) { + return nil, fmt.Errorf("index out of range: %d", i) + } + + ref := s[i] + + if ref == nil || ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +// Schema is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#schema-object +type Schema struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + OneOf SchemaRefs `json:"oneOf,omitempty" yaml:"oneOf,omitempty"` + AnyOf SchemaRefs `json:"anyOf,omitempty" yaml:"anyOf,omitempty"` + AllOf SchemaRefs `json:"allOf,omitempty" yaml:"allOf,omitempty"` + Not *SchemaRef `json:"not,omitempty" yaml:"not,omitempty"` + Type string `json:"type,omitempty" yaml:"type,omitempty"` + Title string `json:"title,omitempty" yaml:"title,omitempty"` + Format string `json:"format,omitempty" yaml:"format,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Enum []interface{} `json:"enum,omitempty" yaml:"enum,omitempty"` + Default interface{} `json:"default,omitempty" yaml:"default,omitempty"` + Example interface{} `json:"example,omitempty" yaml:"example,omitempty"` + ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` + + // Array-related, here for struct compactness + UniqueItems bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"` + // Number-related, here for struct compactness + ExclusiveMin bool `json:"exclusiveMinimum,omitempty" yaml:"exclusiveMinimum,omitempty"` + ExclusiveMax bool `json:"exclusiveMaximum,omitempty" yaml:"exclusiveMaximum,omitempty"` + // Properties + Nullable bool `json:"nullable,omitempty" yaml:"nullable,omitempty"` + ReadOnly bool `json:"readOnly,omitempty" yaml:"readOnly,omitempty"` + WriteOnly bool `json:"writeOnly,omitempty" yaml:"writeOnly,omitempty"` + AllowEmptyValue bool `json:"allowEmptyValue,omitempty" yaml:"allowEmptyValue,omitempty"` + Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"` + XML *XML `json:"xml,omitempty" yaml:"xml,omitempty"` + + // Number + Min *float64 `json:"minimum,omitempty" yaml:"minimum,omitempty"` + Max *float64 `json:"maximum,omitempty" yaml:"maximum,omitempty"` + MultipleOf *float64 `json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"` + + // String + MinLength uint64 `json:"minLength,omitempty" yaml:"minLength,omitempty"` + MaxLength *uint64 `json:"maxLength,omitempty" yaml:"maxLength,omitempty"` + Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"` + 
compiledPattern *regexp.Regexp + + // Array + MinItems uint64 `json:"minItems,omitempty" yaml:"minItems,omitempty"` + MaxItems *uint64 `json:"maxItems,omitempty" yaml:"maxItems,omitempty"` + Items *SchemaRef `json:"items,omitempty" yaml:"items,omitempty"` + + // Object + Required []string `json:"required,omitempty" yaml:"required,omitempty"` + Properties Schemas `json:"properties,omitempty" yaml:"properties,omitempty"` + MinProps uint64 `json:"minProperties,omitempty" yaml:"minProperties,omitempty"` + MaxProps *uint64 `json:"maxProperties,omitempty" yaml:"maxProperties,omitempty"` + AdditionalProperties AdditionalProperties `json:"additionalProperties,omitempty" yaml:"additionalProperties,omitempty"` + Discriminator *Discriminator `json:"discriminator,omitempty" yaml:"discriminator,omitempty"` +} + +type AdditionalProperties struct { + Has *bool + Schema *SchemaRef +} + +// MarshalJSON returns the JSON encoding of AdditionalProperties. +func (addProps AdditionalProperties) MarshalJSON() ([]byte, error) { + if x := addProps.Has; x != nil { + if *x { + return []byte("true"), nil + } + return []byte("false"), nil + } + if x := addProps.Schema; x != nil { + return json.Marshal(x) + } + return nil, nil +} + +// UnmarshalJSON sets AdditionalProperties to a copy of data. +func (addProps *AdditionalProperties) UnmarshalJSON(data []byte) error { + var x interface{} + if err := json.Unmarshal(data, &x); err != nil { + return err + } + switch y := x.(type) { + case nil: + case bool: + addProps.Has = &y + case map[string]interface{}: + if len(y) == 0 { + addProps.Schema = &SchemaRef{Value: &Schema{}} + } else { + buf := new(bytes.Buffer) + json.NewEncoder(buf).Encode(y) + if err := json.NewDecoder(buf).Decode(&addProps.Schema); err != nil { + return err + } + } + default: + return errors.New("cannot unmarshal additionalProperties: value must be either a schema object or a boolean") + } + return nil +} + +var _ jsonpointer.JSONPointable = (*Schema)(nil) + +func NewSchema() *Schema { + return &Schema{} +} + +// MarshalJSON returns the JSON encoding of Schema. 
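+// Zero-valued fields are omitted; entries in Extensions are emitted alongside the
+// spec-defined keys.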
+func (schema Schema) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 36+len(schema.Extensions)) + for k, v := range schema.Extensions { + m[k] = v + } + + if x := schema.OneOf; len(x) != 0 { + m["oneOf"] = x + } + if x := schema.AnyOf; len(x) != 0 { + m["anyOf"] = x + } + if x := schema.AllOf; len(x) != 0 { + m["allOf"] = x + } + if x := schema.Not; x != nil { + m["not"] = x + } + if x := schema.Type; len(x) != 0 { + m["type"] = x + } + if x := schema.Title; len(x) != 0 { + m["title"] = x + } + if x := schema.Format; len(x) != 0 { + m["format"] = x + } + if x := schema.Description; len(x) != 0 { + m["description"] = x + } + if x := schema.Enum; len(x) != 0 { + m["enum"] = x + } + if x := schema.Default; x != nil { + m["default"] = x + } + if x := schema.Example; x != nil { + m["example"] = x + } + if x := schema.ExternalDocs; x != nil { + m["externalDocs"] = x + } + + // Array-related + if x := schema.UniqueItems; x { + m["uniqueItems"] = x + } + // Number-related + if x := schema.ExclusiveMin; x { + m["exclusiveMinimum"] = x + } + if x := schema.ExclusiveMax; x { + m["exclusiveMaximum"] = x + } + // Properties + if x := schema.Nullable; x { + m["nullable"] = x + } + if x := schema.ReadOnly; x { + m["readOnly"] = x + } + if x := schema.WriteOnly; x { + m["writeOnly"] = x + } + if x := schema.AllowEmptyValue; x { + m["allowEmptyValue"] = x + } + if x := schema.Deprecated; x { + m["deprecated"] = x + } + if x := schema.XML; x != nil { + m["xml"] = x + } + + // Number + if x := schema.Min; x != nil { + m["minimum"] = x + } + if x := schema.Max; x != nil { + m["maximum"] = x + } + if x := schema.MultipleOf; x != nil { + m["multipleOf"] = x + } + + // String + if x := schema.MinLength; x != 0 { + m["minLength"] = x + } + if x := schema.MaxLength; x != nil { + m["maxLength"] = x + } + if x := schema.Pattern; x != "" { + m["pattern"] = x + } + + // Array + if x := schema.MinItems; x != 0 { + m["minItems"] = x + } + if x := schema.MaxItems; x != nil { + m["maxItems"] = x + } + if x := schema.Items; x != nil { + m["items"] = x + } + + // Object + if x := schema.Required; len(x) != 0 { + m["required"] = x + } + if x := schema.Properties; len(x) != 0 { + m["properties"] = x + } + if x := schema.MinProps; x != 0 { + m["minProperties"] = x + } + if x := schema.MaxProps; x != nil { + m["maxProperties"] = x + } + if x := schema.AdditionalProperties; x.Has != nil || x.Schema != nil { + m["additionalProperties"] = &x + } + if x := schema.Discriminator; x != nil { + m["discriminator"] = x + } + + return json.Marshal(m) +} + +// UnmarshalJSON sets Schema to a copy of data. 
+func (schema *Schema) UnmarshalJSON(data []byte) error { + type SchemaBis Schema + var x SchemaBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + + delete(x.Extensions, "oneOf") + delete(x.Extensions, "anyOf") + delete(x.Extensions, "allOf") + delete(x.Extensions, "not") + delete(x.Extensions, "type") + delete(x.Extensions, "title") + delete(x.Extensions, "format") + delete(x.Extensions, "description") + delete(x.Extensions, "enum") + delete(x.Extensions, "default") + delete(x.Extensions, "example") + delete(x.Extensions, "externalDocs") + + // Array-related + delete(x.Extensions, "uniqueItems") + // Number-related + delete(x.Extensions, "exclusiveMinimum") + delete(x.Extensions, "exclusiveMaximum") + // Properties + delete(x.Extensions, "nullable") + delete(x.Extensions, "readOnly") + delete(x.Extensions, "writeOnly") + delete(x.Extensions, "allowEmptyValue") + delete(x.Extensions, "deprecated") + delete(x.Extensions, "xml") + + // Number + delete(x.Extensions, "minimum") + delete(x.Extensions, "maximum") + delete(x.Extensions, "multipleOf") + + // String + delete(x.Extensions, "minLength") + delete(x.Extensions, "maxLength") + delete(x.Extensions, "pattern") + + // Array + delete(x.Extensions, "minItems") + delete(x.Extensions, "maxItems") + delete(x.Extensions, "items") + + // Object + delete(x.Extensions, "required") + delete(x.Extensions, "properties") + delete(x.Extensions, "minProperties") + delete(x.Extensions, "maxProperties") + delete(x.Extensions, "additionalProperties") + delete(x.Extensions, "discriminator") + + *schema = Schema(x) + + if schema.Format == "date" { + // This is a fix for: https://github.com/getkin/kin-openapi/issues/697 + if eg, ok := schema.Example.(string); ok { + schema.Example = strings.TrimSuffix(eg, "T00:00:00Z") + } + } + return nil +} + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (schema Schema) JSONLookup(token string) (interface{}, error) { + switch token { + case "additionalProperties": + if addProps := schema.AdditionalProperties.Has; addProps != nil { + return *addProps, nil + } + if addProps := schema.AdditionalProperties.Schema; addProps != nil { + if addProps.Ref != "" { + return &Ref{Ref: addProps.Ref}, nil + } + return addProps.Value, nil + } + case "not": + if schema.Not != nil { + if schema.Not.Ref != "" { + return &Ref{Ref: schema.Not.Ref}, nil + } + return schema.Not.Value, nil + } + case "items": + if schema.Items != nil { + if schema.Items.Ref != "" { + return &Ref{Ref: schema.Items.Ref}, nil + } + return schema.Items.Value, nil + } + case "oneOf": + return schema.OneOf, nil + case "anyOf": + return schema.AnyOf, nil + case "allOf": + return schema.AllOf, nil + case "type": + return schema.Type, nil + case "title": + return schema.Title, nil + case "format": + return schema.Format, nil + case "description": + return schema.Description, nil + case "enum": + return schema.Enum, nil + case "default": + return schema.Default, nil + case "example": + return schema.Example, nil + case "externalDocs": + return schema.ExternalDocs, nil + case "uniqueItems": + return schema.UniqueItems, nil + case "exclusiveMin": + return schema.ExclusiveMin, nil + case "exclusiveMax": + return schema.ExclusiveMax, nil + case "nullable": + return schema.Nullable, nil + case "readOnly": + return schema.ReadOnly, nil + case "writeOnly": + return schema.WriteOnly, nil + case "allowEmptyValue": + return schema.AllowEmptyValue, nil + case "xml": + 
return schema.XML, nil + case "deprecated": + return schema.Deprecated, nil + case "min": + return schema.Min, nil + case "max": + return schema.Max, nil + case "multipleOf": + return schema.MultipleOf, nil + case "minLength": + return schema.MinLength, nil + case "maxLength": + return schema.MaxLength, nil + case "pattern": + return schema.Pattern, nil + case "minItems": + return schema.MinItems, nil + case "maxItems": + return schema.MaxItems, nil + case "required": + return schema.Required, nil + case "properties": + return schema.Properties, nil + case "minProps": + return schema.MinProps, nil + case "maxProps": + return schema.MaxProps, nil + case "discriminator": + return schema.Discriminator, nil + } + + v, _, err := jsonpointer.GetForToken(schema.Extensions, token) + return v, err +} + +func (schema *Schema) NewRef() *SchemaRef { + return &SchemaRef{ + Value: schema, + } +} + +func NewOneOfSchema(schemas ...*Schema) *Schema { + refs := make([]*SchemaRef, 0, len(schemas)) + for _, schema := range schemas { + refs = append(refs, &SchemaRef{Value: schema}) + } + return &Schema{ + OneOf: refs, + } +} + +func NewAnyOfSchema(schemas ...*Schema) *Schema { + refs := make([]*SchemaRef, 0, len(schemas)) + for _, schema := range schemas { + refs = append(refs, &SchemaRef{Value: schema}) + } + return &Schema{ + AnyOf: refs, + } +} + +func NewAllOfSchema(schemas ...*Schema) *Schema { + refs := make([]*SchemaRef, 0, len(schemas)) + for _, schema := range schemas { + refs = append(refs, &SchemaRef{Value: schema}) + } + return &Schema{ + AllOf: refs, + } +} + +func NewBoolSchema() *Schema { + return &Schema{ + Type: TypeBoolean, + } +} + +func NewFloat64Schema() *Schema { + return &Schema{ + Type: TypeNumber, + } +} + +func NewIntegerSchema() *Schema { + return &Schema{ + Type: TypeInteger, + } +} + +func NewInt32Schema() *Schema { + return &Schema{ + Type: TypeInteger, + Format: "int32", + } +} + +func NewInt64Schema() *Schema { + return &Schema{ + Type: TypeInteger, + Format: "int64", + } +} + +func NewStringSchema() *Schema { + return &Schema{ + Type: TypeString, + } +} + +func NewDateTimeSchema() *Schema { + return &Schema{ + Type: TypeString, + Format: "date-time", + } +} + +func NewUUIDSchema() *Schema { + return &Schema{ + Type: TypeString, + Format: "uuid", + } +} + +func NewBytesSchema() *Schema { + return &Schema{ + Type: TypeString, + Format: "byte", + } +} + +func NewArraySchema() *Schema { + return &Schema{ + Type: TypeArray, + } +} + +func NewObjectSchema() *Schema { + return &Schema{ + Type: TypeObject, + Properties: make(Schemas), + } +} + +func (schema *Schema) WithNullable() *Schema { + schema.Nullable = true + return schema +} + +func (schema *Schema) WithMin(value float64) *Schema { + schema.Min = &value + return schema +} + +func (schema *Schema) WithMax(value float64) *Schema { + schema.Max = &value + return schema +} + +func (schema *Schema) WithExclusiveMin(value bool) *Schema { + schema.ExclusiveMin = value + return schema +} + +func (schema *Schema) WithExclusiveMax(value bool) *Schema { + schema.ExclusiveMax = value + return schema +} + +func (schema *Schema) WithEnum(values ...interface{}) *Schema { + schema.Enum = values + return schema +} + +func (schema *Schema) WithDefault(defaultValue interface{}) *Schema { + schema.Default = defaultValue + return schema +} + +func (schema *Schema) WithFormat(value string) *Schema { + schema.Format = value + return schema +} + +func (schema *Schema) WithLength(i int64) *Schema { + n := uint64(i) + schema.MinLength = n + 
schema.MaxLength = &n + return schema +} + +func (schema *Schema) WithMinLength(i int64) *Schema { + n := uint64(i) + schema.MinLength = n + return schema +} + +func (schema *Schema) WithMaxLength(i int64) *Schema { + n := uint64(i) + schema.MaxLength = &n + return schema +} + +func (schema *Schema) WithLengthDecodedBase64(i int64) *Schema { + n := uint64(i) + v := (n*8 + 5) / 6 + schema.MinLength = v + schema.MaxLength = &v + return schema +} + +func (schema *Schema) WithMinLengthDecodedBase64(i int64) *Schema { + n := uint64(i) + schema.MinLength = (n*8 + 5) / 6 + return schema +} + +func (schema *Schema) WithMaxLengthDecodedBase64(i int64) *Schema { + n := uint64(i) + schema.MinLength = (n*8 + 5) / 6 + return schema +} + +func (schema *Schema) WithPattern(pattern string) *Schema { + schema.Pattern = pattern + schema.compiledPattern = nil + return schema +} + +func (schema *Schema) WithItems(value *Schema) *Schema { + schema.Items = &SchemaRef{ + Value: value, + } + return schema +} + +func (schema *Schema) WithMinItems(i int64) *Schema { + n := uint64(i) + schema.MinItems = n + return schema +} + +func (schema *Schema) WithMaxItems(i int64) *Schema { + n := uint64(i) + schema.MaxItems = &n + return schema +} + +func (schema *Schema) WithUniqueItems(unique bool) *Schema { + schema.UniqueItems = unique + return schema +} + +func (schema *Schema) WithProperty(name string, propertySchema *Schema) *Schema { + return schema.WithPropertyRef(name, &SchemaRef{ + Value: propertySchema, + }) +} + +func (schema *Schema) WithPropertyRef(name string, ref *SchemaRef) *Schema { + properties := schema.Properties + if properties == nil { + properties = make(Schemas) + schema.Properties = properties + } + properties[name] = ref + return schema +} + +func (schema *Schema) WithProperties(properties map[string]*Schema) *Schema { + result := make(Schemas, len(properties)) + for k, v := range properties { + result[k] = &SchemaRef{ + Value: v, + } + } + schema.Properties = result + return schema +} + +func (schema *Schema) WithMinProperties(i int64) *Schema { + n := uint64(i) + schema.MinProps = n + return schema +} + +func (schema *Schema) WithMaxProperties(i int64) *Schema { + n := uint64(i) + schema.MaxProps = &n + return schema +} + +func (schema *Schema) WithAnyAdditionalProperties() *Schema { + schema.AdditionalProperties = AdditionalProperties{Has: BoolPtr(true)} + return schema +} + +func (schema *Schema) WithoutAdditionalProperties() *Schema { + schema.AdditionalProperties = AdditionalProperties{Has: BoolPtr(false)} + return schema +} + +func (schema *Schema) WithAdditionalProperties(v *Schema) *Schema { + schema.AdditionalProperties = AdditionalProperties{} + if v != nil { + schema.AdditionalProperties.Schema = &SchemaRef{Value: v} + } + return schema +} + +// IsEmpty tells whether schema is equivalent to the empty schema `{}`. 
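+// Nested schemas referenced through items, properties, not, additionalProperties and
+// the oneOf/anyOf/allOf lists are checked recursively.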
+func (schema *Schema) IsEmpty() bool { + if schema.Type != "" || schema.Format != "" || len(schema.Enum) != 0 || + schema.UniqueItems || schema.ExclusiveMin || schema.ExclusiveMax || + schema.Nullable || schema.ReadOnly || schema.WriteOnly || schema.AllowEmptyValue || + schema.Min != nil || schema.Max != nil || schema.MultipleOf != nil || + schema.MinLength != 0 || schema.MaxLength != nil || schema.Pattern != "" || + schema.MinItems != 0 || schema.MaxItems != nil || + len(schema.Required) != 0 || + schema.MinProps != 0 || schema.MaxProps != nil { + return false + } + if n := schema.Not; n != nil && !n.Value.IsEmpty() { + return false + } + if ap := schema.AdditionalProperties.Schema; ap != nil && !ap.Value.IsEmpty() { + return false + } + if apa := schema.AdditionalProperties.Has; apa != nil && !*apa { + return false + } + if items := schema.Items; items != nil && !items.Value.IsEmpty() { + return false + } + for _, s := range schema.Properties { + if !s.Value.IsEmpty() { + return false + } + } + for _, s := range schema.OneOf { + if !s.Value.IsEmpty() { + return false + } + } + for _, s := range schema.AnyOf { + if !s.Value.IsEmpty() { + return false + } + } + for _, s := range schema.AllOf { + if !s.Value.IsEmpty() { + return false + } + } + return true +} + +// Validate returns an error if Schema does not comply with the OpenAPI spec. +func (schema *Schema) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + return schema.validate(ctx, []*Schema{}) +} + +func (schema *Schema) validate(ctx context.Context, stack []*Schema) error { + validationOpts := getValidationOptions(ctx) + + for _, existing := range stack { + if existing == schema { + return nil + } + } + stack = append(stack, schema) + + if schema.ReadOnly && schema.WriteOnly { + return errors.New("a property MUST NOT be marked as both readOnly and writeOnly being true") + } + + for _, item := range schema.OneOf { + v := item.Value + if v == nil { + return foundUnresolvedRef(item.Ref) + } + if err := v.validate(ctx, stack); err != nil { + return err + } + } + + for _, item := range schema.AnyOf { + v := item.Value + if v == nil { + return foundUnresolvedRef(item.Ref) + } + if err := v.validate(ctx, stack); err != nil { + return err + } + } + + for _, item := range schema.AllOf { + v := item.Value + if v == nil { + return foundUnresolvedRef(item.Ref) + } + if err := v.validate(ctx, stack); err != nil { + return err + } + } + + if ref := schema.Not; ref != nil { + v := ref.Value + if v == nil { + return foundUnresolvedRef(ref.Ref) + } + if err := v.validate(ctx, stack); err != nil { + return err + } + } + + schemaType := schema.Type + switch schemaType { + case "": + case TypeBoolean: + case TypeNumber: + if format := schema.Format; len(format) > 0 { + switch format { + case "float", "double": + default: + if validationOpts.schemaFormatValidationEnabled { + return unsupportedFormat(format) + } + } + } + case TypeInteger: + if format := schema.Format; len(format) > 0 { + switch format { + case "int32", "int64": + default: + if validationOpts.schemaFormatValidationEnabled { + return unsupportedFormat(format) + } + } + } + case TypeString: + if format := schema.Format; len(format) > 0 { + switch format { + // Supported by OpenAPIv3.0.3: + // https://spec.openapis.org/oas/v3.0.3 + case "byte", "binary", "date", "date-time", "password": + // In JSON Draft-07 (not validated yet though): + // https://json-schema.org/draft-07/json-schema-release-notes.html#formats + case "iri", 
"iri-reference", "uri-template", "idn-email", "idn-hostname": + case "json-pointer", "relative-json-pointer", "regex", "time": + // In JSON Draft 2019-09 (not validated yet though): + // https://json-schema.org/draft/2019-09/release-notes.html#format-vocabulary + case "duration", "uuid": + // Defined in some other specification + case "email", "hostname", "ipv4", "ipv6", "uri", "uri-reference": + default: + // Try to check for custom defined formats + if _, ok := SchemaStringFormats[format]; !ok && validationOpts.schemaFormatValidationEnabled { + return unsupportedFormat(format) + } + } + } + if schema.Pattern != "" && !validationOpts.schemaPatternValidationDisabled { + if err := schema.compilePattern(); err != nil { + return err + } + } + case TypeArray: + if schema.Items == nil { + return errors.New("when schema type is 'array', schema 'items' must be non-null") + } + case TypeObject: + default: + return fmt.Errorf("unsupported 'type' value %q", schemaType) + } + + if ref := schema.Items; ref != nil { + v := ref.Value + if v == nil { + return foundUnresolvedRef(ref.Ref) + } + if err := v.validate(ctx, stack); err != nil { + return err + } + } + + properties := make([]string, 0, len(schema.Properties)) + for name := range schema.Properties { + properties = append(properties, name) + } + sort.Strings(properties) + for _, name := range properties { + ref := schema.Properties[name] + v := ref.Value + if v == nil { + return foundUnresolvedRef(ref.Ref) + } + if err := v.validate(ctx, stack); err != nil { + return err + } + } + + if schema.AdditionalProperties.Has != nil && schema.AdditionalProperties.Schema != nil { + return errors.New("additionalProperties are set to both boolean and schema") + } + if ref := schema.AdditionalProperties.Schema; ref != nil { + v := ref.Value + if v == nil { + return foundUnresolvedRef(ref.Ref) + } + if err := v.validate(ctx, stack); err != nil { + return err + } + } + + if v := schema.ExternalDocs; v != nil { + if err := v.Validate(ctx); err != nil { + return fmt.Errorf("invalid external docs: %w", err) + } + } + + if v := schema.Default; v != nil && !validationOpts.schemaDefaultsValidationDisabled { + if err := schema.VisitJSON(v); err != nil { + return fmt.Errorf("invalid default: %w", err) + } + } + + if x := schema.Example; x != nil && !validationOpts.examplesValidationDisabled { + if err := validateExampleValue(ctx, x, schema); err != nil { + return fmt.Errorf("invalid example: %w", err) + } + } + + return validateExtensions(ctx, schema.Extensions) +} + +func (schema *Schema) IsMatching(value interface{}) bool { + settings := newSchemaValidationSettings(FailFast()) + return schema.visitJSON(settings, value) == nil +} + +func (schema *Schema) IsMatchingJSONBoolean(value bool) bool { + settings := newSchemaValidationSettings(FailFast()) + return schema.visitJSON(settings, value) == nil +} + +func (schema *Schema) IsMatchingJSONNumber(value float64) bool { + settings := newSchemaValidationSettings(FailFast()) + return schema.visitJSON(settings, value) == nil +} + +func (schema *Schema) IsMatchingJSONString(value string) bool { + settings := newSchemaValidationSettings(FailFast()) + return schema.visitJSON(settings, value) == nil +} + +func (schema *Schema) IsMatchingJSONArray(value []interface{}) bool { + settings := newSchemaValidationSettings(FailFast()) + return schema.visitJSON(settings, value) == nil +} + +func (schema *Schema) IsMatchingJSONObject(value map[string]interface{}) bool { + settings := newSchemaValidationSettings(FailFast()) + return 
schema.visitJSON(settings, value) == nil +} + +func (schema *Schema) VisitJSON(value interface{}, opts ...SchemaValidationOption) error { + settings := newSchemaValidationSettings(opts...) + return schema.visitJSON(settings, value) +} + +func (schema *Schema) visitJSON(settings *schemaValidationSettings, value interface{}) (err error) { + switch value := value.(type) { + case nil: + return schema.visitJSONNull(settings) + case float64: + if math.IsNaN(value) { + return ErrSchemaInputNaN + } + if math.IsInf(value, 0) { + return ErrSchemaInputInf + } + } + + if schema.IsEmpty() { + return + } + if err = schema.visitSetOperations(settings, value); err != nil { + return + } + + switch value := value.(type) { + case bool: + return schema.visitJSONBoolean(settings, value) + case json.Number: + valueFloat64, err := value.Float64() + if err != nil { + return &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "type", + Reason: "cannot convert json.Number to float64", + customizeMessageError: settings.customizeMessageError, + Origin: err, + } + } + return schema.visitJSONNumber(settings, valueFloat64) + case int: + return schema.visitJSONNumber(settings, float64(value)) + case int32: + return schema.visitJSONNumber(settings, float64(value)) + case int64: + return schema.visitJSONNumber(settings, float64(value)) + case float64: + return schema.visitJSONNumber(settings, value) + case string: + return schema.visitJSONString(settings, value) + case []interface{}: + return schema.visitJSONArray(settings, value) + case map[string]interface{}: + return schema.visitJSONObject(settings, value) + case map[interface{}]interface{}: // for YAML cf. issue #444 + values := make(map[string]interface{}, len(value)) + for key, v := range value { + if k, ok := key.(string); ok { + values[k] = v + } + } + if len(value) == len(values) { + return schema.visitJSONObject(settings, values) + } + } + + // Catch slice of non-empty interface type + if reflect.TypeOf(value).Kind() == reflect.Slice { + valueR := reflect.ValueOf(value) + newValue := make([]interface{}, 0, valueR.Len()) + for i := 0; i < valueR.Len(); i++ { + newValue = append(newValue, valueR.Index(i).Interface()) + } + return schema.visitJSONArray(settings, newValue) + } + + return &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "type", + Reason: fmt.Sprintf("unhandled value of type %T", value), + customizeMessageError: settings.customizeMessageError, + } +} + +func (schema *Schema) visitSetOperations(settings *schemaValidationSettings, value interface{}) (err error) { + if enum := schema.Enum; len(enum) != 0 { + for _, v := range enum { + switch c := value.(type) { + case json.Number: + var f float64 + if f, err = strconv.ParseFloat(c.String(), 64); err != nil { + return err + } + if v == f { + return + } + default: + if reflect.DeepEqual(v, value) { + return + } + } + } + if settings.failfast { + return errSchema + } + allowedValues, _ := json.Marshal(enum) + return &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "enum", + Reason: fmt.Sprintf("value is not one of the allowed values %s", string(allowedValues)), + customizeMessageError: settings.customizeMessageError, + } + } + + if ref := schema.Not; ref != nil { + v := ref.Value + if v == nil { + return foundUnresolvedRef(ref.Ref) + } + if err := v.visitJSON(settings, value); err == nil { + if settings.failfast { + return errSchema + } + return &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "not", + customizeMessageError: settings.customizeMessageError, 
+ } + } + } + + if v := schema.OneOf; len(v) > 0 { + var discriminatorRef string + if schema.Discriminator != nil { + pn := schema.Discriminator.PropertyName + if valuemap, okcheck := value.(map[string]interface{}); okcheck { + discriminatorVal, okcheck := valuemap[pn] + if !okcheck { + return &SchemaError{ + Schema: schema, + SchemaField: "discriminator", + Reason: fmt.Sprintf("input does not contain the discriminator property %q", pn), + } + } + + discriminatorValString, okcheck := discriminatorVal.(string) + if !okcheck { + return &SchemaError{ + Value: discriminatorVal, + Schema: schema, + SchemaField: "discriminator", + Reason: fmt.Sprintf("value of discriminator property %q is not a string", pn), + } + } + + if discriminatorRef, okcheck = schema.Discriminator.Mapping[discriminatorValString]; len(schema.Discriminator.Mapping) > 0 && !okcheck { + return &SchemaError{ + Value: discriminatorVal, + Schema: schema, + SchemaField: "discriminator", + Reason: fmt.Sprintf("discriminator property %q has invalid value", pn), + } + } + } + } + + var ( + ok = 0 + validationErrors = multiErrorForOneOf{} + matchedOneOfIndices = make([]int, 0) + tempValue = value + ) + for idx, item := range v { + v := item.Value + if v == nil { + return foundUnresolvedRef(item.Ref) + } + + if discriminatorRef != "" && discriminatorRef != item.Ref { + continue + } + + // make a deep copy to protect origin value from being injected default value that defined in mismatched oneOf schema + if settings.asreq || settings.asrep { + tempValue = deepcopy.Copy(value) + } + + if err := v.visitJSON(settings, tempValue); err != nil { + validationErrors = append(validationErrors, err) + continue + } + + matchedOneOfIndices = append(matchedOneOfIndices, idx) + ok++ + } + + if ok != 1 { + if settings.failfast { + return errSchema + } + e := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "oneOf", + customizeMessageError: settings.customizeMessageError, + } + if ok > 1 { + e.Origin = ErrOneOfConflict + e.Reason = fmt.Sprintf(`value matches more than one schema from "oneOf" (matches schemas at indices %v)`, matchedOneOfIndices) + } else { + e.Origin = fmt.Errorf("doesn't match schema due to: %w", validationErrors) + e.Reason = `value doesn't match any schema from "oneOf"` + } + + return e + } + + // run again to inject default value that defined in matched oneOf schema + if settings.asreq || settings.asrep { + _ = v[matchedOneOfIndices[0]].Value.visitJSON(settings, value) + } + } + + if v := schema.AnyOf; len(v) > 0 { + var ( + ok = false + matchedAnyOfIdx = 0 + tempValue = value + ) + for idx, item := range v { + v := item.Value + if v == nil { + return foundUnresolvedRef(item.Ref) + } + // make a deep copy to protect origin value from being injected default value that defined in mismatched anyOf schema + if settings.asreq || settings.asrep { + tempValue = deepcopy.Copy(value) + } + if err := v.visitJSON(settings, tempValue); err == nil { + ok = true + matchedAnyOfIdx = idx + break + } + } + if !ok { + if settings.failfast { + return errSchema + } + return &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "anyOf", + Reason: `doesn't match any schema from "anyOf"`, + customizeMessageError: settings.customizeMessageError, + } + } + + _ = v[matchedAnyOfIdx].Value.visitJSON(settings, value) + } + + for _, item := range schema.AllOf { + v := item.Value + if v == nil { + return foundUnresolvedRef(item.Ref) + } + if err := v.visitJSON(settings, value); err != nil { + if settings.failfast { + return errSchema + } 
+ return &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "allOf", + Reason: `doesn't match all schemas from "allOf"`, + Origin: err, + customizeMessageError: settings.customizeMessageError, + } + } + } + return +} + +// The value is not considered in visitJSONNull because according to the spec +// "null is not supported as a type" unless `nullable` is also set to true +// https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#data-types +// https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#schema-object +func (schema *Schema) visitJSONNull(settings *schemaValidationSettings) (err error) { + if schema.Nullable { + return + } + if settings.failfast { + return errSchema + } + return &SchemaError{ + Value: nil, + Schema: schema, + SchemaField: "nullable", + Reason: "Value is not nullable", + customizeMessageError: settings.customizeMessageError, + } +} + +func (schema *Schema) VisitJSONBoolean(value bool) error { + settings := newSchemaValidationSettings() + return schema.visitJSONBoolean(settings, value) +} + +func (schema *Schema) visitJSONBoolean(settings *schemaValidationSettings, value bool) (err error) { + if schemaType := schema.Type; schemaType != "" && schemaType != TypeBoolean { + return schema.expectedType(settings, value) + } + return +} + +func (schema *Schema) VisitJSONNumber(value float64) error { + settings := newSchemaValidationSettings() + return schema.visitJSONNumber(settings, value) +} + +func (schema *Schema) visitJSONNumber(settings *schemaValidationSettings, value float64) error { + var me MultiError + schemaType := schema.Type + if schemaType == TypeInteger { + if bigFloat := big.NewFloat(value); !bigFloat.IsInt() { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "type", + Reason: fmt.Sprintf("value must be an integer"), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + } else if schemaType != "" && schemaType != TypeNumber { + return schema.expectedType(settings, value) + } + + // formats + if schemaType == TypeInteger && schema.Format != "" { + formatMin := float64(0) + formatMax := float64(0) + switch schema.Format { + case "int32": + formatMin = formatMinInt32 + formatMax = formatMaxInt32 + case "int64": + formatMin = formatMinInt64 + formatMax = formatMaxInt64 + default: + if settings.formatValidationEnabled { + return unsupportedFormat(schema.Format) + } + } + if formatMin != 0 && formatMax != 0 && !(formatMin <= value && value <= formatMax) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "format", + Reason: fmt.Sprintf("number must be an %s", schema.Format), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + } + + // "exclusiveMinimum" + if v := schema.ExclusiveMin; v && !(*schema.Min < value) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "exclusiveMinimum", + Reason: fmt.Sprintf("number must be more than %g", *schema.Min), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "exclusiveMaximum" + if v := schema.ExclusiveMax; v && !(*schema.Max > value) { + if settings.failfast { + return errSchema + } + err := 
&SchemaError{ + Value: value, + Schema: schema, + SchemaField: "exclusiveMaximum", + Reason: fmt.Sprintf("number must be less than %g", *schema.Max), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "minimum" + if v := schema.Min; v != nil && !(*v <= value) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "minimum", + Reason: fmt.Sprintf("number must be at least %g", *v), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "maximum" + if v := schema.Max; v != nil && !(*v >= value) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "maximum", + Reason: fmt.Sprintf("number must be at most %g", *v), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "multipleOf" + if v := schema.MultipleOf; v != nil { + // "A numeric instance is valid only if division by this keyword's + // value results in an integer." + if bigFloat := big.NewFloat(value / *v); !bigFloat.IsInt() { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "multipleOf", + Reason: fmt.Sprintf("number must be a multiple of %g", *v), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + } + + if len(me) > 0 { + return me + } + + return nil +} + +func (schema *Schema) VisitJSONString(value string) error { + settings := newSchemaValidationSettings() + return schema.visitJSONString(settings, value) +} + +func (schema *Schema) visitJSONString(settings *schemaValidationSettings, value string) error { + if schemaType := schema.Type; schemaType != "" && schemaType != TypeString { + return schema.expectedType(settings, value) + } + + var me MultiError + + // "minLength" and "maxLength" + minLength := schema.MinLength + maxLength := schema.MaxLength + if minLength != 0 || maxLength != nil { + // JSON schema string lengths are UTF-16, not UTF-8! 
+ length := int64(0) + for _, r := range value { + if utf16.IsSurrogate(r) { + length += 2 + } else { + length++ + } + } + if minLength != 0 && length < int64(minLength) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "minLength", + Reason: fmt.Sprintf("minimum string length is %d", minLength), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + if maxLength != nil && length > int64(*maxLength) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "maxLength", + Reason: fmt.Sprintf("maximum string length is %d", *maxLength), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + } + + // "pattern" + if schema.Pattern != "" && schema.compiledPattern == nil && !settings.patternValidationDisabled { + var err error + if err = schema.compilePattern(); err != nil { + if !settings.multiError { + return err + } + me = append(me, err) + } + } + if cp := schema.compiledPattern; cp != nil && !cp.MatchString(value) { + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "pattern", + Reason: fmt.Sprintf(`string doesn't match the regular expression "%s"`, schema.Pattern), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "format" + var formatStrErr string + var formatErr error + if format := schema.Format; format != "" { + if f, ok := SchemaStringFormats[format]; ok { + switch { + case f.regexp != nil && f.callback == nil: + if cp := f.regexp; !cp.MatchString(value) { + formatStrErr = fmt.Sprintf(`string doesn't match the format %q (regular expression "%s")`, format, cp.String()) + } + case f.regexp == nil && f.callback != nil: + if err := f.callback(value); err != nil { + var schemaErr = &SchemaError{} + if errors.As(err, &schemaErr) { + formatStrErr = fmt.Sprintf(`string doesn't match the format %q (%s)`, format, schemaErr.Reason) + } else { + formatStrErr = fmt.Sprintf(`string doesn't match the format %q (%v)`, format, err) + } + formatErr = err + } + default: + formatStrErr = fmt.Sprintf("corrupted entry %q in SchemaStringFormats", format) + } + } + } + if formatStrErr != "" || formatErr != nil { + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "format", + Reason: formatStrErr, + Origin: formatErr, + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + + } + + if len(me) > 0 { + return me + } + + return nil +} + +func (schema *Schema) VisitJSONArray(value []interface{}) error { + settings := newSchemaValidationSettings() + return schema.visitJSONArray(settings, value) +} + +func (schema *Schema) visitJSONArray(settings *schemaValidationSettings, value []interface{}) error { + if schemaType := schema.Type; schemaType != "" && schemaType != TypeArray { + return schema.expectedType(settings, value) + } + + var me MultiError + + lenValue := int64(len(value)) + + // "minItems" + if v := schema.MinItems; v != 0 && lenValue < int64(v) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "minItems", + Reason: fmt.Sprintf("minimum number of items is %d", v), + customizeMessageError: settings.customizeMessageError, + } + if 
!settings.multiError { + return err + } + me = append(me, err) + } + + // "maxItems" + if v := schema.MaxItems; v != nil && lenValue > int64(*v) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "maxItems", + Reason: fmt.Sprintf("maximum number of items is %d", *v), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "uniqueItems" + if sliceUniqueItemsChecker == nil { + sliceUniqueItemsChecker = isSliceOfUniqueItems + } + if v := schema.UniqueItems; v && !sliceUniqueItemsChecker(value) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "uniqueItems", + Reason: "duplicate items found", + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "items" + if itemSchemaRef := schema.Items; itemSchemaRef != nil { + itemSchema := itemSchemaRef.Value + if itemSchema == nil { + return foundUnresolvedRef(itemSchemaRef.Ref) + } + for i, item := range value { + if err := itemSchema.visitJSON(settings, item); err != nil { + err = markSchemaErrorIndex(err, i) + if !settings.multiError { + return err + } + if itemMe, ok := err.(MultiError); ok { + me = append(me, itemMe...) + } else { + me = append(me, err) + } + } + } + } + + if len(me) > 0 { + return me + } + + return nil +} + +func (schema *Schema) VisitJSONObject(value map[string]interface{}) error { + settings := newSchemaValidationSettings() + return schema.visitJSONObject(settings, value) +} + +func (schema *Schema) visitJSONObject(settings *schemaValidationSettings, value map[string]interface{}) error { + if schemaType := schema.Type; schemaType != "" && schemaType != TypeObject { + return schema.expectedType(settings, value) + } + + var me MultiError + + if settings.asreq || settings.asrep { + properties := make([]string, 0, len(schema.Properties)) + for propName := range schema.Properties { + properties = append(properties, propName) + } + sort.Strings(properties) + for _, propName := range properties { + propSchema := schema.Properties[propName] + reqRO := settings.asreq && propSchema.Value.ReadOnly && !settings.readOnlyValidationDisabled + repWO := settings.asrep && propSchema.Value.WriteOnly && !settings.writeOnlyValidationDisabled + + if f := settings.defaultsSet; f != nil && value[propName] == nil { + if dflt := propSchema.Value.Default; dflt != nil && !reqRO && !repWO { + value[propName] = dflt + settings.onceSettingDefaults.Do(f) + } + } + + if value[propName] != nil { + if reqRO { + me = append(me, fmt.Errorf("readOnly property %q in request", propName)) + } else if repWO { + me = append(me, fmt.Errorf("writeOnly property %q in response", propName)) + } + } + } + } + + // "properties" + properties := schema.Properties + lenValue := int64(len(value)) + + // "minProperties" + if v := schema.MinProps; v != 0 && lenValue < int64(v) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "minProperties", + Reason: fmt.Sprintf("there must be at least %d properties", v), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "maxProperties" + if v := schema.MaxProps; v != nil && lenValue > int64(*v) { + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + 
Schema: schema, + SchemaField: "maxProperties", + Reason: fmt.Sprintf("there must be at most %d properties", *v), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "additionalProperties" + var additionalProperties *Schema + if ref := schema.AdditionalProperties.Schema; ref != nil { + additionalProperties = ref.Value + } + keys := make([]string, 0, len(value)) + for k := range value { + keys = append(keys, k) + } + sort.Strings(keys) + for _, k := range keys { + v := value[k] + if properties != nil { + propertyRef := properties[k] + if propertyRef != nil { + p := propertyRef.Value + if p == nil { + return foundUnresolvedRef(propertyRef.Ref) + } + if err := p.visitJSON(settings, v); err != nil { + if settings.failfast { + return errSchema + } + err = markSchemaErrorKey(err, k) + if !settings.multiError { + return err + } + if v, ok := err.(MultiError); ok { + me = append(me, v...) + continue + } + me = append(me, err) + } + continue + } + } + if allowed := schema.AdditionalProperties.Has; allowed == nil || *allowed { + if additionalProperties != nil { + if err := additionalProperties.visitJSON(settings, v); err != nil { + if settings.failfast { + return errSchema + } + err = markSchemaErrorKey(err, k) + if !settings.multiError { + return err + } + if v, ok := err.(MultiError); ok { + me = append(me, v...) + continue + } + me = append(me, err) + } + } + continue + } + if settings.failfast { + return errSchema + } + err := &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "properties", + Reason: fmt.Sprintf("property %q is unsupported", k), + customizeMessageError: settings.customizeMessageError, + } + if !settings.multiError { + return err + } + me = append(me, err) + } + + // "required" + for _, k := range schema.Required { + if _, ok := value[k]; !ok { + if s := schema.Properties[k]; s != nil && s.Value.ReadOnly && settings.asreq { + continue + } + if s := schema.Properties[k]; s != nil && s.Value.WriteOnly && settings.asrep { + continue + } + if settings.failfast { + return errSchema + } + err := markSchemaErrorKey(&SchemaError{ + Value: value, + Schema: schema, + SchemaField: "required", + Reason: fmt.Sprintf("property %q is missing", k), + customizeMessageError: settings.customizeMessageError, + }, k) + if !settings.multiError { + return err + } + me = append(me, err) + } + } + + if len(me) > 0 { + return me + } + + return nil +} + +func (schema *Schema) expectedType(settings *schemaValidationSettings, value interface{}) error { + if settings.failfast { + return errSchema + } + + a := "a" + switch schema.Type { + case TypeArray, TypeObject, TypeInteger: + a = "an" + } + return &SchemaError{ + Value: value, + Schema: schema, + SchemaField: "type", + Reason: fmt.Sprintf("value must be %s %s", a, schema.Type), + customizeMessageError: settings.customizeMessageError, + } +} + +func (schema *Schema) compilePattern() (err error) { + if schema.compiledPattern, err = regexp.Compile(schema.Pattern); err != nil { + return &SchemaError{ + Schema: schema, + SchemaField: "pattern", + Origin: err, + Reason: fmt.Sprintf("cannot compile pattern %q: %v", schema.Pattern, err), + } + } + return nil +} + +// SchemaError is an error that occurs during schema validation. +type SchemaError struct { + // Value is the value that failed validation. + Value interface{} + // reversePath is the path to the value that failed validation. + reversePath []string + // Schema is the schema that failed validation. 
+ Schema *Schema + // SchemaField is the field of the schema that failed validation. + SchemaField string + // Reason is a human-readable message describing the error. + // The message should never include the original value to prevent leakage of potentially sensitive inputs in error messages. + Reason string + // Origin is the original error that caused this error. + Origin error + // customizeMessageError is a function that can be used to customize the error message. + customizeMessageError func(err *SchemaError) string +} + +var _ interface{ Unwrap() error } = SchemaError{} + +func markSchemaErrorKey(err error, key string) error { + var me multiErrorForOneOf + + if errors.As(err, &me) { + err = me.Unwrap() + } + + if v, ok := err.(*SchemaError); ok { + v.reversePath = append(v.reversePath, key) + return v + } + if v, ok := err.(MultiError); ok { + for _, e := range v { + _ = markSchemaErrorKey(e, key) + } + return v + } + return err +} + +func markSchemaErrorIndex(err error, index int) error { + return markSchemaErrorKey(err, strconv.FormatInt(int64(index), 10)) +} + +func (err *SchemaError) JSONPointer() []string { + reversePath := err.reversePath + path := append([]string(nil), reversePath...) + for left, right := 0, len(path)-1; left < right; left, right = left+1, right-1 { + path[left], path[right] = path[right], path[left] + } + return path +} + +func (err *SchemaError) Error() string { + if err.customizeMessageError != nil { + if msg := err.customizeMessageError(err); msg != "" { + return msg + } + } + + buf := bytes.NewBuffer(make([]byte, 0, 256)) + + if len(err.reversePath) > 0 { + buf.WriteString(`Error at "`) + reversePath := err.reversePath + for i := len(reversePath) - 1; i >= 0; i-- { + buf.WriteByte('/') + buf.WriteString(reversePath[i]) + } + buf.WriteString(`": `) + } + + if err.Origin != nil { + buf.WriteString(err.Origin.Error()) + + return buf.String() + } + + reason := err.Reason + if reason == "" { + buf.WriteString(`Doesn't match schema "`) + buf.WriteString(err.SchemaField) + buf.WriteString(`"`) + } else { + buf.WriteString(reason) + } + + if !SchemaErrorDetailsDisabled { + buf.WriteString("\nSchema:\n ") + encoder := json.NewEncoder(buf) + encoder.SetIndent(" ", " ") + if err := encoder.Encode(err.Schema); err != nil { + panic(err) + } + buf.WriteString("\nValue:\n ") + if err := encoder.Encode(err.Value); err != nil { + panic(err) + } + } + + return buf.String() +} + +func (err SchemaError) Unwrap() error { + return err.Origin +} + +func isSliceOfUniqueItems(xs []interface{}) bool { + s := len(xs) + m := make(map[string]struct{}, s) + for _, x := range xs { + // The input slice is coverted from a JSON string, there shall + // have no error when covert it back. + key, _ := json.Marshal(&x) + m[string(key)] = struct{}{} + } + return s == len(m) +} + +// SliceUniqueItemsChecker is an function used to check if an given slice +// have unique items. +type SliceUniqueItemsChecker func(items []interface{}) bool + +// By default using predefined func isSliceOfUniqueItems which make use of +// json.Marshal to generate a key for map used to check if a given slice +// have unique items. +var sliceUniqueItemsChecker SliceUniqueItemsChecker = isSliceOfUniqueItems + +// RegisterArrayUniqueItemsChecker is used to register a customized function +// used to check if JSON array have unique items. 
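
Taken together, VisitJSON and SchemaError above form the package's runtime validation entry point: VisitJSON walks a decoded JSON value against the schema and reports the first mismatch as a *SchemaError (or aggregates them when MultiErrors is requested). A hedged usage sketch follows; it is not part of the patch, the schema shape and values are invented, and the upstream import path github.com/getkin/kin-openapi/openapi3 is assumed.

package main

import (
	"errors"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Hypothetical schema for this sketch: an object with a bounded integer "port".
	schema := openapi3.NewObjectSchema().
		WithProperty("port", openapi3.NewInt64Schema().WithMin(1).WithMax(65535))

	// 70000 violates the "maximum" keyword, so VisitJSON returns a *SchemaError.
	if err := schema.VisitJSON(map[string]interface{}{"port": float64(70000)}); err != nil {
		var schemaErr *openapi3.SchemaError
		if errors.As(err, &schemaErr) {
			// JSONPointer names the offending property; Reason carries the
			// human-readable message ("number must be at most 65535").
			fmt.Println(schemaErr.JSONPointer(), schemaErr.Reason)
		}
	}

	// IsMatching is the boolean, fail-fast variant of the same walk.
	fmt.Println(schema.IsMatching(map[string]interface{}{"port": float64(8080)})) // true
}
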
+func RegisterArrayUniqueItemsChecker(fn SliceUniqueItemsChecker) { + sliceUniqueItemsChecker = fn +} + +func unsupportedFormat(format string) error { + return fmt.Errorf("unsupported 'format' value %q", format) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go new file mode 100644 index 00000000..ea38400c --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go @@ -0,0 +1,106 @@ +package openapi3 + +import ( + "fmt" + "net" + "regexp" + "strings" +) + +const ( + // FormatOfStringForUUIDOfRFC4122 is an optional predefined format for UUID v1-v5 as specified by RFC4122 + FormatOfStringForUUIDOfRFC4122 = `^(?:[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000)$` + + // FormatOfStringForEmail pattern catches only some suspiciously wrong-looking email addresses. + // Use DefineStringFormat(...) if you need something stricter. + FormatOfStringForEmail = `^[^@]+@[^@<>",\s]+$` +) + +// FormatCallback performs custom checks on exotic formats +type FormatCallback func(value string) error + +// Format represents a format validator registered by either DefineStringFormat or DefineStringFormatCallback +type Format struct { + regexp *regexp.Regexp + callback FormatCallback +} + +// SchemaStringFormats allows for validating string formats +var SchemaStringFormats = make(map[string]Format, 4) + +// DefineStringFormat defines a new regexp pattern for a given format +func DefineStringFormat(name string, pattern string) { + re, err := regexp.Compile(pattern) + if err != nil { + err := fmt.Errorf("format %q has invalid pattern %q: %w", name, pattern, err) + panic(err) + } + SchemaStringFormats[name] = Format{regexp: re} +} + +// DefineStringFormatCallback adds a validation function for a specific schema format entry +func DefineStringFormatCallback(name string, callback FormatCallback) { + SchemaStringFormats[name] = Format{callback: callback} +} + +func validateIP(ip string) error { + parsed := net.ParseIP(ip) + if parsed == nil { + return &SchemaError{ + Value: ip, + Reason: "Not an IP address", + } + } + return nil +} + +func validateIPv4(ip string) error { + if err := validateIP(ip); err != nil { + return err + } + + if !(strings.Count(ip, ":") < 2) { + return &SchemaError{ + Value: ip, + Reason: "Not an IPv4 address (it's IPv6)", + } + } + return nil +} + +func validateIPv6(ip string) error { + if err := validateIP(ip); err != nil { + return err + } + + if !(strings.Count(ip, ":") >= 2) { + return &SchemaError{ + Value: ip, + Reason: "Not an IPv6 address (it's IPv4)", + } + } + return nil +} + +func init() { + // Base64 + // The pattern supports base64 and b./ase64url. Padding ('=') is supported. 
+ DefineStringFormat("byte", `(^$|^[a-zA-Z0-9+/\-_]*=*$)`) + + // date + DefineStringFormat("date", `^[0-9]{4}-(0[0-9]|10|11|12)-([0-2][0-9]|30|31)$`) + + // date-time + DefineStringFormat("date-time", `^[0-9]{4}-(0[0-9]|10|11|12)-([0-2][0-9]|30|31)T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|(\+|-)[0-9]{2}:[0-9]{2})?$`) + +} + +// DefineIPv4Format opts in ipv4 format validation on top of OAS 3 spec +func DefineIPv4Format() { + DefineStringFormatCallback("ipv4", validateIPv4) +} + +// DefineIPv6Format opts in ipv6 format validation on top of OAS 3 spec +func DefineIPv6Format() { + DefineStringFormatCallback("ipv6", validateIPv6) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go new file mode 100644 index 00000000..17aad2fa --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go @@ -0,0 +1,79 @@ +package openapi3 + +import ( + "sync" +) + +// SchemaValidationOption describes options a user has when validating request / response bodies. +type SchemaValidationOption func(*schemaValidationSettings) + +type schemaValidationSettings struct { + failfast bool + multiError bool + asreq, asrep bool // exclusive (XOR) fields + formatValidationEnabled bool + patternValidationDisabled bool + readOnlyValidationDisabled bool + writeOnlyValidationDisabled bool + + onceSettingDefaults sync.Once + defaultsSet func() + + customizeMessageError func(err *SchemaError) string +} + +// FailFast returns schema validation errors quicker. +func FailFast() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.failfast = true } +} + +func MultiErrors() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.multiError = true } +} + +func VisitAsRequest() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.asreq, s.asrep = true, false } +} + +func VisitAsResponse() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.asreq, s.asrep = false, true } +} + +// EnableFormatValidation setting makes Validate not return an error when validating documents that mention schema formats that are not defined by the OpenAPIv3 specification. +func EnableFormatValidation() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.formatValidationEnabled = true } +} + +// DisablePatternValidation setting makes Validate not return an error when validating patterns that are not supported by the Go regexp engine. +func DisablePatternValidation() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.patternValidationDisabled = true } +} + +// DisableReadOnlyValidation setting makes Validate not return an error when validating properties marked as read-only +func DisableReadOnlyValidation() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.readOnlyValidationDisabled = true } +} + +// DisableWriteOnlyValidation setting makes Validate not return an error when validating properties marked as write-only +func DisableWriteOnlyValidation() SchemaValidationOption { + return func(s *schemaValidationSettings) { s.writeOnlyValidationDisabled = true } +} + +// DefaultsSet executes the given callback (once) IFF schema validation set default values. 
+func DefaultsSet(f func()) SchemaValidationOption { + return func(s *schemaValidationSettings) { s.defaultsSet = f } +} + +// SetSchemaErrorMessageCustomizer allows to override the schema error message. +// If the passed function returns an empty string, it returns to the previous Error() implementation. +func SetSchemaErrorMessageCustomizer(f func(err *SchemaError) string) SchemaValidationOption { + return func(s *schemaValidationSettings) { s.customizeMessageError = f } +} + +func newSchemaValidationSettings(opts ...SchemaValidationOption) *schemaValidationSettings { + settings := &schemaValidationSettings{} + for _, opt := range opts { + opt(settings) + } + return settings +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go new file mode 100644 index 00000000..87891c95 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go @@ -0,0 +1,51 @@ +package openapi3 + +import ( + "context" +) + +type SecurityRequirements []SecurityRequirement + +func NewSecurityRequirements() *SecurityRequirements { + return &SecurityRequirements{} +} + +func (srs *SecurityRequirements) With(securityRequirement SecurityRequirement) *SecurityRequirements { + *srs = append(*srs, securityRequirement) + return srs +} + +// Validate returns an error if SecurityRequirements does not comply with the OpenAPI spec. +func (srs SecurityRequirements) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + for _, security := range srs { + if err := security.Validate(ctx); err != nil { + return err + } + } + return nil +} + +// SecurityRequirement is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#security-requirement-object +type SecurityRequirement map[string][]string + +func NewSecurityRequirement() SecurityRequirement { + return make(SecurityRequirement) +} + +func (security SecurityRequirement) Authenticate(provider string, scopes ...string) SecurityRequirement { + if len(scopes) == 0 { + scopes = []string{} // Forces the variable to be encoded as an array instead of null + } + security[provider] = scopes + return security +} + +// Validate returns an error if SecurityRequirement does not comply with the OpenAPI spec. +func (security *SecurityRequirement) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) 
+ + return nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go new file mode 100644 index 00000000..76cc21f3 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go @@ -0,0 +1,412 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/url" + + "github.com/go-openapi/jsonpointer" +) + +type SecuritySchemes map[string]*SecuritySchemeRef + +// JSONLookup implements github.com/go-openapi/jsonpointer#JSONPointable +func (s SecuritySchemes) JSONLookup(token string) (interface{}, error) { + ref, ok := s[token] + if ref == nil || ok == false { + return nil, fmt.Errorf("object has no field %q", token) + } + + if ref.Ref != "" { + return &Ref{Ref: ref.Ref}, nil + } + return ref.Value, nil +} + +var _ jsonpointer.JSONPointable = (*SecuritySchemes)(nil) + +// SecurityScheme is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#security-scheme-object +type SecurityScheme struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Type string `json:"type,omitempty" yaml:"type,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Name string `json:"name,omitempty" yaml:"name,omitempty"` + In string `json:"in,omitempty" yaml:"in,omitempty"` + Scheme string `json:"scheme,omitempty" yaml:"scheme,omitempty"` + BearerFormat string `json:"bearerFormat,omitempty" yaml:"bearerFormat,omitempty"` + Flows *OAuthFlows `json:"flows,omitempty" yaml:"flows,omitempty"` + OpenIdConnectUrl string `json:"openIdConnectUrl,omitempty" yaml:"openIdConnectUrl,omitempty"` +} + +func NewSecurityScheme() *SecurityScheme { + return &SecurityScheme{} +} + +func NewCSRFSecurityScheme() *SecurityScheme { + return &SecurityScheme{ + Type: "apiKey", + In: "header", + Name: "X-XSRF-TOKEN", + } +} + +func NewOIDCSecurityScheme(oidcUrl string) *SecurityScheme { + return &SecurityScheme{ + Type: "openIdConnect", + OpenIdConnectUrl: oidcUrl, + } +} + +func NewJWTSecurityScheme() *SecurityScheme { + return &SecurityScheme{ + Type: "http", + Scheme: "bearer", + BearerFormat: "JWT", + } +} + +// MarshalJSON returns the JSON encoding of SecurityScheme. +func (ss SecurityScheme) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 8+len(ss.Extensions)) + for k, v := range ss.Extensions { + m[k] = v + } + if x := ss.Type; x != "" { + m["type"] = x + } + if x := ss.Description; x != "" { + m["description"] = x + } + if x := ss.Name; x != "" { + m["name"] = x + } + if x := ss.In; x != "" { + m["in"] = x + } + if x := ss.Scheme; x != "" { + m["scheme"] = x + } + if x := ss.BearerFormat; x != "" { + m["bearerFormat"] = x + } + if x := ss.Flows; x != nil { + m["flows"] = x + } + if x := ss.OpenIdConnectUrl; x != "" { + m["openIdConnectUrl"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets SecurityScheme to a copy of data. 
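
The constructors above (NewSecurityRequirements, NewSecurityRequirement, NewJWTSecurityScheme, and friends) cover the common security-definition shapes. A brief, hedged sketch of how they combine; the scheme name "bearerAuth" and the scope string are placeholders, and the upstream import path is assumed.

package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// http/bearer scheme with a JWT bearer format.
	scheme := openapi3.NewJWTSecurityScheme()

	// Requirement referencing a scheme named "bearerAuth" (placeholder name).
	requirements := openapi3.NewSecurityRequirements().
		With(openapi3.NewSecurityRequirement().Authenticate("bearerAuth", "registry:read"))

	fmt.Println(scheme.Validate(context.Background()))       // <nil>
	fmt.Println(requirements.Validate(context.Background())) // <nil>
}
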
+func (ss *SecurityScheme) UnmarshalJSON(data []byte) error { + type SecuritySchemeBis SecurityScheme + var x SecuritySchemeBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "type") + delete(x.Extensions, "description") + delete(x.Extensions, "name") + delete(x.Extensions, "in") + delete(x.Extensions, "scheme") + delete(x.Extensions, "bearerFormat") + delete(x.Extensions, "flows") + delete(x.Extensions, "openIdConnectUrl") + *ss = SecurityScheme(x) + return nil +} + +func (ss *SecurityScheme) WithType(value string) *SecurityScheme { + ss.Type = value + return ss +} + +func (ss *SecurityScheme) WithDescription(value string) *SecurityScheme { + ss.Description = value + return ss +} + +func (ss *SecurityScheme) WithName(value string) *SecurityScheme { + ss.Name = value + return ss +} + +func (ss *SecurityScheme) WithIn(value string) *SecurityScheme { + ss.In = value + return ss +} + +func (ss *SecurityScheme) WithScheme(value string) *SecurityScheme { + ss.Scheme = value + return ss +} + +func (ss *SecurityScheme) WithBearerFormat(value string) *SecurityScheme { + ss.BearerFormat = value + return ss +} + +// Validate returns an error if SecurityScheme does not comply with the OpenAPI spec. +func (ss *SecurityScheme) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + hasIn := false + hasBearerFormat := false + hasFlow := false + switch ss.Type { + case "apiKey": + hasIn = true + case "http": + scheme := ss.Scheme + switch scheme { + case "bearer": + hasBearerFormat = true + case "basic", "negotiate", "digest": + default: + return fmt.Errorf("security scheme of type 'http' has invalid 'scheme' value %q", scheme) + } + case "oauth2": + hasFlow = true + case "openIdConnect": + if ss.OpenIdConnectUrl == "" { + return fmt.Errorf("no OIDC URL found for openIdConnect security scheme %q", ss.Name) + } + default: + return fmt.Errorf("security scheme 'type' can't be %q", ss.Type) + } + + // Validate "in" and "name" + if hasIn { + switch ss.In { + case "query", "header", "cookie": + default: + return fmt.Errorf("security scheme of type 'apiKey' should have 'in'. It can be 'query', 'header' or 'cookie', not %q", ss.In) + } + if ss.Name == "" { + return errors.New("security scheme of type 'apiKey' should have 'name'") + } + } else if len(ss.In) > 0 { + return fmt.Errorf("security scheme of type %q can't have 'in'", ss.Type) + } else if len(ss.Name) > 0 { + return fmt.Errorf("security scheme of type %q can't have 'name'", ss.Type) + } + + // Validate "format" + // "bearerFormat" is an arbitrary string so we only check if the scheme supports it + if !hasBearerFormat && len(ss.BearerFormat) > 0 { + return fmt.Errorf("security scheme of type %q can't have 'bearerFormat'", ss.Type) + } + + // Validate "flow" + if hasFlow { + flow := ss.Flows + if flow == nil { + return fmt.Errorf("security scheme of type %q should have 'flows'", ss.Type) + } + if err := flow.Validate(ctx); err != nil { + return fmt.Errorf("security scheme 'flow' is invalid: %w", err) + } + } else if ss.Flows != nil { + return fmt.Errorf("security scheme of type %q can't have 'flows'", ss.Type) + } + + return validateExtensions(ctx, ss.Extensions) +} + +// OAuthFlows is specified by OpenAPI/Swagger standard version 3. 
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#oauth-flows-object +type OAuthFlows struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Implicit *OAuthFlow `json:"implicit,omitempty" yaml:"implicit,omitempty"` + Password *OAuthFlow `json:"password,omitempty" yaml:"password,omitempty"` + ClientCredentials *OAuthFlow `json:"clientCredentials,omitempty" yaml:"clientCredentials,omitempty"` + AuthorizationCode *OAuthFlow `json:"authorizationCode,omitempty" yaml:"authorizationCode,omitempty"` +} + +type oAuthFlowType int + +const ( + oAuthFlowTypeImplicit oAuthFlowType = iota + oAuthFlowTypePassword + oAuthFlowTypeClientCredentials + oAuthFlowAuthorizationCode +) + +// MarshalJSON returns the JSON encoding of OAuthFlows. +func (flows OAuthFlows) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 4+len(flows.Extensions)) + for k, v := range flows.Extensions { + m[k] = v + } + if x := flows.Implicit; x != nil { + m["implicit"] = x + } + if x := flows.Password; x != nil { + m["password"] = x + } + if x := flows.ClientCredentials; x != nil { + m["clientCredentials"] = x + } + if x := flows.AuthorizationCode; x != nil { + m["authorizationCode"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets OAuthFlows to a copy of data. +func (flows *OAuthFlows) UnmarshalJSON(data []byte) error { + type OAuthFlowsBis OAuthFlows + var x OAuthFlowsBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "implicit") + delete(x.Extensions, "password") + delete(x.Extensions, "clientCredentials") + delete(x.Extensions, "authorizationCode") + *flows = OAuthFlows(x) + return nil +} + +// Validate returns an error if OAuthFlows does not comply with the OpenAPI spec. +func (flows *OAuthFlows) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if v := flows.Implicit; v != nil { + if err := v.validate(ctx, oAuthFlowTypeImplicit, opts...); err != nil { + return fmt.Errorf("the OAuth flow 'implicit' is invalid: %w", err) + } + } + + if v := flows.Password; v != nil { + if err := v.validate(ctx, oAuthFlowTypePassword, opts...); err != nil { + return fmt.Errorf("the OAuth flow 'password' is invalid: %w", err) + } + } + + if v := flows.ClientCredentials; v != nil { + if err := v.validate(ctx, oAuthFlowTypeClientCredentials, opts...); err != nil { + return fmt.Errorf("the OAuth flow 'clientCredentials' is invalid: %w", err) + } + } + + if v := flows.AuthorizationCode; v != nil { + if err := v.validate(ctx, oAuthFlowAuthorizationCode, opts...); err != nil { + return fmt.Errorf("the OAuth flow 'authorizationCode' is invalid: %w", err) + } + } + + return validateExtensions(ctx, flows.Extensions) +} + +// OAuthFlow is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#oauth-flow-object +type OAuthFlow struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + AuthorizationURL string `json:"authorizationUrl,omitempty" yaml:"authorizationUrl,omitempty"` + TokenURL string `json:"tokenUrl,omitempty" yaml:"tokenUrl,omitempty"` + RefreshURL string `json:"refreshUrl,omitempty" yaml:"refreshUrl,omitempty"` + Scopes map[string]string `json:"scopes" yaml:"scopes"` // required +} + +// MarshalJSON returns the JSON encoding of OAuthFlow. 
+func (flow OAuthFlow) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 4+len(flow.Extensions)) + for k, v := range flow.Extensions { + m[k] = v + } + if x := flow.AuthorizationURL; x != "" { + m["authorizationUrl"] = x + } + if x := flow.TokenURL; x != "" { + m["tokenUrl"] = x + } + if x := flow.RefreshURL; x != "" { + m["refreshUrl"] = x + } + m["scopes"] = flow.Scopes + return json.Marshal(m) +} + +// UnmarshalJSON sets OAuthFlow to a copy of data. +func (flow *OAuthFlow) UnmarshalJSON(data []byte) error { + type OAuthFlowBis OAuthFlow + var x OAuthFlowBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "authorizationUrl") + delete(x.Extensions, "tokenUrl") + delete(x.Extensions, "refreshUrl") + delete(x.Extensions, "scopes") + *flow = OAuthFlow(x) + return nil +} + +// Validate returns an error if OAuthFlows does not comply with the OpenAPI spec. +func (flow *OAuthFlow) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if v := flow.RefreshURL; v != "" { + if _, err := url.Parse(v); err != nil { + return fmt.Errorf("field 'refreshUrl' is invalid: %w", err) + } + } + + if flow.Scopes == nil { + return errors.New("field 'scopes' is missing") + } + + return validateExtensions(ctx, flow.Extensions) +} + +func (flow *OAuthFlow) validate(ctx context.Context, typ oAuthFlowType, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + typeIn := func(types ...oAuthFlowType) bool { + for _, ty := range types { + if ty == typ { + return true + } + } + return false + } + + if in := typeIn(oAuthFlowTypeImplicit, oAuthFlowAuthorizationCode); true { + switch { + case flow.AuthorizationURL == "" && in: + return errors.New("field 'authorizationUrl' is empty or missing") + case flow.AuthorizationURL != "" && !in: + return errors.New("field 'authorizationUrl' should not be set") + case flow.AuthorizationURL != "": + if _, err := url.Parse(flow.AuthorizationURL); err != nil { + return fmt.Errorf("field 'authorizationUrl' is invalid: %w", err) + } + } + } + + if in := typeIn(oAuthFlowTypePassword, oAuthFlowTypeClientCredentials, oAuthFlowAuthorizationCode); true { + switch { + case flow.TokenURL == "" && in: + return errors.New("field 'tokenUrl' is empty or missing") + case flow.TokenURL != "" && !in: + return errors.New("field 'tokenUrl' should not be set") + case flow.TokenURL != "": + if _, err := url.Parse(flow.TokenURL); err != nil { + return fmt.Errorf("field 'tokenUrl' is invalid: %w", err) + } + } + } + + return flow.Validate(ctx, opts...) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go new file mode 100644 index 00000000..2ec8bd2d --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go @@ -0,0 +1,17 @@ +package openapi3 + +const ( + SerializationSimple = "simple" + SerializationLabel = "label" + SerializationMatrix = "matrix" + SerializationForm = "form" + SerializationSpaceDelimited = "spaceDelimited" + SerializationPipeDelimited = "pipeDelimited" + SerializationDeepObject = "deepObject" +) + +// SerializationMethod describes a serialization method of HTTP request's parameters and body. 
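
OAuthFlows and OAuthFlow above model the oauth2 flow objects; the per-flow validate method enforces which of authorizationUrl and tokenUrl each flow type must carry. A hedged sketch with placeholder URLs and scope names, assuming the upstream import path:

package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// An authorizationCode flow needs both authorizationUrl and tokenUrl,
	// plus a non-nil scopes map (placeholder values here).
	flows := &openapi3.OAuthFlows{
		AuthorizationCode: &openapi3.OAuthFlow{
			AuthorizationURL: "https://auth.example.com/authorize",
			TokenURL:         "https://auth.example.com/token",
			Scopes:           map[string]string{"registry:read": "read access"},
		},
	}

	fmt.Println(flows.Validate(context.Background())) // <nil>
}
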
+type SerializationMethod struct { + Style string + Explode bool +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/server.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/server.go new file mode 100644 index 00000000..9fc99f90 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/server.go @@ -0,0 +1,278 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "math" + "net/url" + "sort" + "strings" +) + +// Servers is specified by OpenAPI/Swagger standard version 3. +type Servers []*Server + +// Validate returns an error if Servers does not comply with the OpenAPI spec. +func (servers Servers) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + for _, v := range servers { + if err := v.Validate(ctx); err != nil { + return err + } + } + return nil +} + +// BasePath returns the base path of the first server in the list, or /. +func (servers Servers) BasePath() (string, error) { + for _, server := range servers { + return server.BasePath() + } + return "/", nil +} + +func (servers Servers) MatchURL(parsedURL *url.URL) (*Server, []string, string) { + rawURL := parsedURL.String() + if i := strings.IndexByte(rawURL, '?'); i >= 0 { + rawURL = rawURL[:i] + } + for _, server := range servers { + pathParams, remaining, ok := server.MatchRawURL(rawURL) + if ok { + return server, pathParams, remaining + } + } + return nil, nil, "" +} + +// Server is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#server-object +type Server struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + URL string `json:"url" yaml:"url"` // Required + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Variables map[string]*ServerVariable `json:"variables,omitempty" yaml:"variables,omitempty"` +} + +// BasePath returns the base path extracted from the default values of variables, if any. +// Assumes a valid struct (per Validate()). +func (server *Server) BasePath() (string, error) { + if server == nil { + return "/", nil + } + + uri := server.URL + for name, svar := range server.Variables { + uri = strings.ReplaceAll(uri, "{"+name+"}", svar.Default) + } + + u, err := url.ParseRequestURI(uri) + if err != nil { + return "", err + } + + if bp := u.Path; bp != "" { + return bp, nil + } + + return "/", nil +} + +// MarshalJSON returns the JSON encoding of Server. +func (server Server) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 3+len(server.Extensions)) + for k, v := range server.Extensions { + m[k] = v + } + m["url"] = server.URL + if x := server.Description; x != "" { + m["description"] = x + } + if x := server.Variables; len(x) != 0 { + m["variables"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Server to a copy of data. 
+func (server *Server) UnmarshalJSON(data []byte) error { + type ServerBis Server + var x ServerBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "url") + delete(x.Extensions, "description") + delete(x.Extensions, "variables") + *server = Server(x) + return nil +} + +func (server Server) ParameterNames() ([]string, error) { + pattern := server.URL + var params []string + for len(pattern) > 0 { + i := strings.IndexByte(pattern, '{') + if i < 0 { + break + } + pattern = pattern[i+1:] + i = strings.IndexByte(pattern, '}') + if i < 0 { + return nil, errors.New("missing '}'") + } + params = append(params, strings.TrimSpace(pattern[:i])) + pattern = pattern[i+1:] + } + return params, nil +} + +func (server Server) MatchRawURL(input string) ([]string, string, bool) { + pattern := server.URL + var params []string + for len(pattern) > 0 { + c := pattern[0] + if len(pattern) == 1 && c == '/' { + break + } + if c == '{' { + // Find end of pattern + i := strings.IndexByte(pattern, '}') + if i < 0 { + return nil, "", false + } + pattern = pattern[i+1:] + + // Find next matching pattern character or next '/' whichever comes first + np := -1 + if len(pattern) > 0 { + np = strings.IndexByte(input, pattern[0]) + } + ns := strings.IndexByte(input, '/') + + if np < 0 { + i = ns + } else if ns < 0 { + i = np + } else { + i = int(math.Min(float64(np), float64(ns))) + } + if i < 0 { + i = len(input) + } + params = append(params, input[:i]) + input = input[i:] + continue + } + if len(input) == 0 || input[0] != c { + return nil, "", false + } + pattern = pattern[1:] + input = input[1:] + } + if input == "" { + input = "/" + } + if input[0] != '/' { + return nil, "", false + } + return params, input, true +} + +// Validate returns an error if Server does not comply with the OpenAPI spec. +func (server *Server) Validate(ctx context.Context, opts ...ValidationOption) (err error) { + ctx = WithValidationOptions(ctx, opts...) + + if server.URL == "" { + return errors.New("value of url must be a non-empty string") + } + + opening, closing := strings.Count(server.URL, "{"), strings.Count(server.URL, "}") + if opening != closing { + return errors.New("server URL has mismatched { and }") + } + + if opening != len(server.Variables) { + return errors.New("server has undeclared variables") + } + + variables := make([]string, 0, len(server.Variables)) + for name := range server.Variables { + variables = append(variables, name) + } + sort.Strings(variables) + for _, name := range variables { + v := server.Variables[name] + if !strings.Contains(server.URL, "{"+name+"}") { + return errors.New("server has undeclared variables") + } + if err = v.Validate(ctx); err != nil { + return + } + } + + return validateExtensions(ctx, server.Extensions) +} + +// ServerVariable is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#server-variable-object +type ServerVariable struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Enum []string `json:"enum,omitempty" yaml:"enum,omitempty"` + Default string `json:"default,omitempty" yaml:"default,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` +} + +// MarshalJSON returns the JSON encoding of ServerVariable. 
+func (serverVariable ServerVariable) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 4+len(serverVariable.Extensions)) + for k, v := range serverVariable.Extensions { + m[k] = v + } + if x := serverVariable.Enum; len(x) != 0 { + m["enum"] = x + } + if x := serverVariable.Default; x != "" { + m["default"] = x + } + if x := serverVariable.Description; x != "" { + m["description"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets ServerVariable to a copy of data. +func (serverVariable *ServerVariable) UnmarshalJSON(data []byte) error { + type ServerVariableBis ServerVariable + var x ServerVariableBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "enum") + delete(x.Extensions, "default") + delete(x.Extensions, "description") + *serverVariable = ServerVariable(x) + return nil +} + +// Validate returns an error if ServerVariable does not comply with the OpenAPI spec. +func (serverVariable *ServerVariable) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if serverVariable.Default == "" { + data, err := serverVariable.MarshalJSON() + if err != nil { + return err + } + return fmt.Errorf("field default is required in %s", data) + } + + return validateExtensions(ctx, serverVariable.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/tag.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/tag.go new file mode 100644 index 00000000..93009a13 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/tag.go @@ -0,0 +1,87 @@ +package openapi3 + +import ( + "context" + "encoding/json" + "fmt" +) + +// Tags is specified by OpenAPI/Swagger 3.0 standard. +type Tags []*Tag + +func (tags Tags) Get(name string) *Tag { + for _, tag := range tags { + if tag.Name == name { + return tag + } + } + return nil +} + +// Validate returns an error if Tags does not comply with the OpenAPI spec. +func (tags Tags) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + for _, v := range tags { + if err := v.Validate(ctx); err != nil { + return err + } + } + return nil +} + +// Tag is specified by OpenAPI/Swagger 3.0 standard. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#tag-object +type Tag struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Name string `json:"name,omitempty" yaml:"name,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` +} + +// MarshalJSON returns the JSON encoding of Tag. +func (t Tag) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 3+len(t.Extensions)) + for k, v := range t.Extensions { + m[k] = v + } + if x := t.Name; x != "" { + m["name"] = x + } + if x := t.Description; x != "" { + m["description"] = x + } + if x := t.ExternalDocs; x != nil { + m["externalDocs"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets Tag to a copy of data. 
+func (t *Tag) UnmarshalJSON(data []byte) error { + type TagBis Tag + var x TagBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "name") + delete(x.Extensions, "description") + delete(x.Extensions, "externalDocs") + *t = Tag(x) + return nil +} + +// Validate returns an error if Tag does not comply with the OpenAPI spec. +func (t *Tag) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + if v := t.ExternalDocs; v != nil { + if err := v.Validate(ctx); err != nil { + return fmt.Errorf("invalid external docs: %w", err) + } + } + + return validateExtensions(ctx, t.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml new file mode 100644 index 00000000..ff8240eb --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml @@ -0,0 +1,16 @@ +openapi: "3.0.3" +info: + title: Recursive cyclic refs example + version: "1.0" +components: + schemas: + Foo: + properties: + foo2: + $ref: "other.yml#/components/schemas/Foo2" + bar: + $ref: "#/components/schemas/Bar" + Bar: + properties: + foo: + $ref: "#/components/schemas/Foo" diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml new file mode 100644 index 00000000..29b72d98 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml @@ -0,0 +1,10 @@ +openapi: "3.0.3" +info: + title: Recursive cyclic refs example + version: "1.0" +components: + schemas: + Foo2: + properties: + id: + type: string diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml new file mode 100644 index 00000000..cc59fc27 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml @@ -0,0 +1,2 @@ +type: string +example: bar diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml new file mode 100644 index 00000000..c476aa1a --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml @@ -0,0 +1,4 @@ +type: object +properties: + cat: + $ref: ../openapi.yml#/components/schemas/Cat diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml new file mode 100644 index 00000000..53a23366 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml @@ -0,0 +1,4 @@ +type: object +properties: + bar: + $ref: ../openapi.yml#/components/schemas/Bar diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml 
b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml new file mode 100644 index 00000000..aeac81f4 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml @@ -0,0 +1,4 @@ +type: object +properties: + foo: + $ref: ../../openapi.yml#/components/schemas/Foo diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml new file mode 100644 index 00000000..b4d40479 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml @@ -0,0 +1,2 @@ +type: object +title: ErrorDetails diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml new file mode 100644 index 00000000..d1370e32 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml @@ -0,0 +1,60 @@ +openapi: "3.0.3" +info: + title: Deep recursive cyclic refs example + version: "1.0" +paths: + /foo: + $ref: ./paths/foo.yml +components: + schemas: + FilterColumnIncludes: + type: object + properties: + $includes: + $ref: '#/components/schemas/FilterPredicate' + additionalProperties: false + maxProperties: 1 + minProperties: 1 + FilterPredicate: + oneOf: + - $ref: '#/components/schemas/FilterValue' + - type: array + items: + $ref: '#/components/schemas/FilterPredicate' + minLength: 1 + - $ref: '#/components/schemas/FilterPredicateOp' + - $ref: '#/components/schemas/FilterPredicateRangeOp' + FilterPredicateOp: + type: object + properties: + $any: + oneOf: + - type: array + items: + $ref: '#/components/schemas/FilterPredicate' + $none: + oneOf: + - $ref: '#/components/schemas/FilterPredicate' + - type: array + items: + $ref: '#/components/schemas/FilterPredicate' + additionalProperties: false + maxProperties: 1 + minProperties: 1 + FilterPredicateRangeOp: + type: object + properties: + $lt: + $ref: '#/components/schemas/FilterRangeValue' + additionalProperties: false + maxProperties: 2 + minProperties: 2 + FilterRangeValue: + oneOf: + - type: number + - type: string + FilterValue: + oneOf: + - type: number + - type: string + - type: boolean \ No newline at end of file diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml new file mode 100644 index 00000000..9f884c71 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml @@ -0,0 +1,33 @@ +openapi: "3.0.3" +info: + title: Recursive refs example + version: "1.0" +paths: + /foo: + $ref: ./paths/foo.yml + /double-ref-foo: + get: + summary: Double ref response + description: Reference response with double reference. 
+ responses: + "400": + $ref: "#/components/responses/400" +components: + schemas: + Foo: + $ref: ./components/Foo.yml + Foo2: + $ref: ./components/Foo/Foo2.yml + Bar: + $ref: ./components/Bar.yml + Cat: + $ref: ./components/Cat.yml + Error: + $ref: ./components/models/error.yaml + responses: + "400": + description: 400 Bad Request + content: + application/json: + schema: + $ref: "#/components/schemas/Error" diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml new file mode 100644 index 00000000..0d508527 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml @@ -0,0 +1,110 @@ +{ + "components": { + "parameters": { + "number": { + "in": "query", + "name": "someNumber", + "schema": { + "type": "string" + } + } + }, + "responses": { + "400": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + }, + "description": "400 Bad Request" + } + }, + "schemas": { + "Bar": { + "example": "bar", + "type": "string" + }, + "Error":{ + "title":"ErrorDetails", + "type":"object" + }, + "Foo": { + "properties": { + "bar": { + "$ref": "#/components/schemas/Bar" + } + }, + "type": "object" + }, + "Foo2": { + "properties": { + "foo": { + "$ref": "#/components/schemas/Foo" + } + }, + "type": "object" + }, + "error":{ + "title":"ErrorDetails", + "type":"object" + }, + "Cat": { + "properties": { + "cat": { + "$ref": "#/components/schemas/Cat" + } + }, + "type": "object" + } + } + }, + "info": { + "title": "Recursive refs example", + "version": "1.0" + }, + "openapi": "3.0.3", + "paths": { + "/double-ref-foo": { + "get": { + "description": "Reference response with double reference.", + "responses": { + "400": { + "$ref": "#/components/responses/400" + } + }, + "summary": "Double ref response" + } + }, + "/foo": { + "get": { + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "foo2": { + "$ref": "#/components/schemas/Foo2" + } + }, + "type": "object" + } + } + }, + "description": "OK" + }, + "400": { + "$ref": "#/components/responses/400" + } + } + }, + "parameters": [ + { + "$ref": "#/components/parameters/number" + } + ] + } + } +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml new file mode 100644 index 00000000..29f0f264 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml @@ -0,0 +1,4 @@ +name: someNumber +in: query +schema: + type: string diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml new file mode 100644 index 00000000..4c845b53 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml @@ -0,0 +1,15 @@ +parameters: + - $ref: ../parameters/number.yml +get: + responses: + "200": + description: OK + content: + application/json: + schema: + type: object + properties: + foo2: + $ref: ../openapi.yml#/components/schemas/Foo2 + "400": + $ref: 
"../openapi.yml#/components/responses/400" diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go new file mode 100644 index 00000000..0ca12e5a --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go @@ -0,0 +1,112 @@ +package openapi3 + +import "context" + +// ValidationOption allows the modification of how the OpenAPI document is validated. +type ValidationOption func(options *ValidationOptions) + +// ValidationOptions provides configuration for validating OpenAPI documents. +type ValidationOptions struct { + examplesValidationAsReq, examplesValidationAsRes bool + examplesValidationDisabled bool + schemaDefaultsValidationDisabled bool + schemaFormatValidationEnabled bool + schemaPatternValidationDisabled bool + extraSiblingFieldsAllowed map[string]struct{} +} + +type validationOptionsKey struct{} + +// AllowExtraSiblingFields called as AllowExtraSiblingFields("description") makes Validate not return an error when said field appears next to a $ref. +func AllowExtraSiblingFields(fields ...string) ValidationOption { + return func(options *ValidationOptions) { + for _, field := range fields { + if options.extraSiblingFieldsAllowed == nil { + options.extraSiblingFieldsAllowed = make(map[string]struct{}, len(fields)) + } + options.extraSiblingFieldsAllowed[field] = struct{}{} + } + } +} + +// EnableSchemaFormatValidation makes Validate not return an error when validating documents that mention schema formats that are not defined by the OpenAPIv3 specification. +// By default, schema format validation is disabled. +func EnableSchemaFormatValidation() ValidationOption { + return func(options *ValidationOptions) { + options.schemaFormatValidationEnabled = true + } +} + +// DisableSchemaFormatValidation does the opposite of EnableSchemaFormatValidation. +// By default, schema format validation is disabled. +func DisableSchemaFormatValidation() ValidationOption { + return func(options *ValidationOptions) { + options.schemaFormatValidationEnabled = false + } +} + +// EnableSchemaPatternValidation does the opposite of DisableSchemaPatternValidation. +// By default, schema pattern validation is enabled. +func EnableSchemaPatternValidation() ValidationOption { + return func(options *ValidationOptions) { + options.schemaPatternValidationDisabled = false + } +} + +// DisableSchemaPatternValidation makes Validate not return an error when validating patterns that are not supported by the Go regexp engine. +func DisableSchemaPatternValidation() ValidationOption { + return func(options *ValidationOptions) { + options.schemaPatternValidationDisabled = true + } +} + +// EnableSchemaDefaultsValidation does the opposite of DisableSchemaDefaultsValidation. +// By default, schema default values are validated against their schema. +func EnableSchemaDefaultsValidation() ValidationOption { + return func(options *ValidationOptions) { + options.schemaDefaultsValidationDisabled = false + } +} + +// DisableSchemaDefaultsValidation disables schemas' default field validation. +// By default, schema default values are validated against their schema. +func DisableSchemaDefaultsValidation() ValidationOption { + return func(options *ValidationOptions) { + options.schemaDefaultsValidationDisabled = true + } +} + +// EnableExamplesValidation does the opposite of DisableExamplesValidation. +// By default, all schema examples are validated. 
+func EnableExamplesValidation() ValidationOption { + return func(options *ValidationOptions) { + options.examplesValidationDisabled = false + } +} + +// DisableExamplesValidation disables all example schema validation. +// By default, all schema examples are validated. +func DisableExamplesValidation() ValidationOption { + return func(options *ValidationOptions) { + options.examplesValidationDisabled = true + } +} + +// WithValidationOptions allows adding validation options to a context object that can be used when validationg any OpenAPI type. +func WithValidationOptions(ctx context.Context, opts ...ValidationOption) context.Context { + if len(opts) == 0 { + return ctx + } + options := &ValidationOptions{} + for _, opt := range opts { + opt(options) + } + return context.WithValue(ctx, validationOptionsKey{}, options) +} + +func getValidationOptions(ctx context.Context) *ValidationOptions { + if options, ok := ctx.Value(validationOptionsKey{}).(*ValidationOptions); ok { + return options + } + return &ValidationOptions{} +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/visited.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/visited.go new file mode 100644 index 00000000..67f970e3 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/visited.go @@ -0,0 +1,41 @@ +package openapi3 + +func newVisited() visitedComponent { + return visitedComponent{ + header: make(map[*Header]struct{}), + schema: make(map[*Schema]struct{}), + } +} + +type visitedComponent struct { + header map[*Header]struct{} + schema map[*Schema]struct{} +} + +// resetVisited clears visitedComponent map +// should be called before recursion over doc *T +func (doc *T) resetVisited() { + doc.visited = newVisited() +} + +// isVisitedHeader returns `true` if the *Header pointer was already visited +// otherwise it returns `false` +func (doc *T) isVisitedHeader(h *Header) bool { + if _, ok := doc.visited.header[h]; ok { + return true + } + + doc.visited.header[h] = struct{}{} + return false +} + +// isVisitedHeader returns `true` if the *Schema pointer was already visited +// otherwise it returns `false` +func (doc *T) isVisitedSchema(s *Schema) bool { + if _, ok := doc.visited.schema[s]; ok { + return true + } + + doc.visited.schema[s] = struct{}{} + return false +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3/xml.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/xml.go new file mode 100644 index 00000000..34ed3be3 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3/xml.go @@ -0,0 +1,66 @@ +package openapi3 + +import ( + "context" + "encoding/json" +) + +// XML is specified by OpenAPI/Swagger standard version 3. +// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#xml-object +type XML struct { + Extensions map[string]interface{} `json:"-" yaml:"-"` + + Name string `json:"name,omitempty" yaml:"name,omitempty"` + Namespace string `json:"namespace,omitempty" yaml:"namespace,omitempty"` + Prefix string `json:"prefix,omitempty" yaml:"prefix,omitempty"` + Attribute bool `json:"attribute,omitempty" yaml:"attribute,omitempty"` + Wrapped bool `json:"wrapped,omitempty" yaml:"wrapped,omitempty"` +} + +// MarshalJSON returns the JSON encoding of XML. 
+func (xml XML) MarshalJSON() ([]byte, error) { + m := make(map[string]interface{}, 5+len(xml.Extensions)) + for k, v := range xml.Extensions { + m[k] = v + } + if x := xml.Name; x != "" { + m["name"] = x + } + if x := xml.Namespace; x != "" { + m["namespace"] = x + } + if x := xml.Prefix; x != "" { + m["prefix"] = x + } + if x := xml.Attribute; x { + m["attribute"] = x + } + if x := xml.Wrapped; x { + m["wrapped"] = x + } + return json.Marshal(m) +} + +// UnmarshalJSON sets XML to a copy of data. +func (xml *XML) UnmarshalJSON(data []byte) error { + type XMLBis XML + var x XMLBis + if err := json.Unmarshal(data, &x); err != nil { + return err + } + _ = json.Unmarshal(data, &x.Extensions) + delete(x.Extensions, "name") + delete(x.Extensions, "namespace") + delete(x.Extensions, "prefix") + delete(x.Extensions, "attribute") + delete(x.Extensions, "wrapped") + *xml = XML(x) + return nil +} + +// Validate returns an error if XML does not comply with the OpenAPI spec. +func (xml *XML) Validate(ctx context.Context, opts ...ValidationOption) error { + ctx = WithValidationOptions(ctx, opts...) + + return validateExtensions(ctx, xml.Extensions) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/authentication_input.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/authentication_input.go new file mode 100644 index 00000000..a53484b9 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/authentication_input.go @@ -0,0 +1,29 @@ +package openapi3filter + +import ( + "fmt" + + "github.com/getkin/kin-openapi/openapi3" +) + +type AuthenticationInput struct { + RequestValidationInput *RequestValidationInput + SecuritySchemeName string + SecurityScheme *openapi3.SecurityScheme + Scopes []string +} + +func (input *AuthenticationInput) NewError(err error) error { + if err == nil { + if len(input.Scopes) == 0 { + err = fmt.Errorf("security requirement %q failed", input.SecuritySchemeName) + } else { + err = fmt.Errorf("security requirement %q (scopes: %+v) failed", input.SecuritySchemeName, input.Scopes) + } + } + return &RequestError{ + Input: input.RequestValidationInput, + Reason: "authorization failed", + Err: err, + } +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/errors.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/errors.go new file mode 100644 index 00000000..b5454a75 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/errors.go @@ -0,0 +1,92 @@ +package openapi3filter + +import ( + "bytes" + "fmt" + + "github.com/getkin/kin-openapi/openapi3" +) + +var _ error = &RequestError{} + +// RequestError is returned by ValidateRequest when request does not match OpenAPI spec +type RequestError struct { + Input *RequestValidationInput + Parameter *openapi3.Parameter + RequestBody *openapi3.RequestBody + Reason string + Err error +} + +var _ interface{ Unwrap() error } = RequestError{} + +func (err *RequestError) Error() string { + reason := err.Reason + if e := err.Err; e != nil { + if len(reason) == 0 || reason == e.Error() { + reason = e.Error() + } else { + reason += ": " + e.Error() + } + } + if v := err.Parameter; v != nil { + return fmt.Sprintf("parameter %q in %s has an error: %s", v.Name, v.In, reason) + } else if v := err.RequestBody; v != nil { + return fmt.Sprintf("request body has an error: %s", reason) + } else { + return reason + } +} + +func (err RequestError) 
Unwrap() error { + return err.Err +} + +var _ error = &ResponseError{} + +// ResponseError is returned by ValidateResponse when response does not match OpenAPI spec +type ResponseError struct { + Input *ResponseValidationInput + Reason string + Err error +} + +var _ interface{ Unwrap() error } = ResponseError{} + +func (err *ResponseError) Error() string { + reason := err.Reason + if e := err.Err; e != nil { + if len(reason) == 0 { + reason = e.Error() + } else { + reason += ": " + e.Error() + } + } + return reason +} + +func (err ResponseError) Unwrap() error { + return err.Err +} + +var _ error = &SecurityRequirementsError{} + +// SecurityRequirementsError is returned by ValidateSecurityRequirements +// when no requirement is met. +type SecurityRequirementsError struct { + SecurityRequirements openapi3.SecurityRequirements + Errors []error +} + +func (err *SecurityRequirementsError) Error() string { + buff := &bytes.Buffer{} + buff.WriteString("security requirements failed: ") + for i, e := range err.Errors { + buff.WriteString(e.Error()) + if i != len(err.Errors)-1 { + buff.WriteString(" | ") + } + } + + return buff.String() +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/internal.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/internal.go new file mode 100644 index 00000000..5c6a8a6c --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/internal.go @@ -0,0 +1,25 @@ +package openapi3filter + +import ( + "reflect" + "strings" +) + +func parseMediaType(contentType string) string { + i := strings.IndexByte(contentType, ';') + if i < 0 { + return contentType + } + return contentType[:i] +} + +func isNilValue(value interface{}) bool { + if value == nil { + return true + } + switch reflect.TypeOf(value).Kind() { + case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice: + return reflect.ValueOf(value).IsNil() + } + return false +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/middleware.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/middleware.go new file mode 100644 index 00000000..3bcb9db4 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/middleware.go @@ -0,0 +1,283 @@ +package openapi3filter + +import ( + "bytes" + "io" + "io/ioutil" + "log" + "net/http" + + "github.com/getkin/kin-openapi/routers" +) + +// Validator provides HTTP request and response validation middleware. +type Validator struct { + router routers.Router + errFunc ErrFunc + logFunc LogFunc + strict bool + options Options +} + +// ErrFunc handles errors that may occur during validation. +type ErrFunc func(w http.ResponseWriter, status int, code ErrCode, err error) + +// LogFunc handles log messages that may occur during validation. +type LogFunc func(message string, err error) + +// ErrCode is used for classification of different types of errors that may +// occur during validation. These may be used to write an appropriate response +// in ErrFunc. +type ErrCode int + +const ( + // ErrCodeOK indicates no error. It is also the default value. + ErrCodeOK = 0 + // ErrCodeCannotFindRoute happens when the validator fails to resolve the + // request to a defined OpenAPI route. + ErrCodeCannotFindRoute = iota + // ErrCodeRequestInvalid happens when the inbound request does not conform + // to the OpenAPI 3 specification. 
+ ErrCodeRequestInvalid = iota + // ErrCodeResponseInvalid happens when the wrapped handler response does + // not conform to the OpenAPI 3 specification. + ErrCodeResponseInvalid = iota +) + +func (e ErrCode) responseText() string { + switch e { + case ErrCodeOK: + return "OK" + case ErrCodeCannotFindRoute: + return "not found" + case ErrCodeRequestInvalid: + return "bad request" + default: + return "server error" + } +} + +// NewValidator returns a new response validation middlware, using the given +// routes from an OpenAPI 3 specification. +func NewValidator(router routers.Router, options ...ValidatorOption) *Validator { + v := &Validator{ + router: router, + errFunc: func(w http.ResponseWriter, status int, code ErrCode, _ error) { + http.Error(w, code.responseText(), status) + }, + logFunc: func(message string, err error) { + log.Printf("%s: %v", message, err) + }, + } + for i := range options { + options[i](v) + } + return v +} + +// ValidatorOption defines an option that may be specified when creating a +// Validator. +type ValidatorOption func(*Validator) + +// OnErr provides a callback that handles writing an HTTP response on a +// validation error. This allows customization of error responses without +// prescribing a particular form. This callback is only called on response +// validator errors in Strict mode. +func OnErr(f ErrFunc) ValidatorOption { + return func(v *Validator) { + v.errFunc = f + } +} + +// OnLog provides a callback that handles logging in the Validator. This allows +// the validator to integrate with a services' existing logging system without +// prescribing a particular one. +func OnLog(f LogFunc) ValidatorOption { + return func(v *Validator) { + v.logFunc = f + } +} + +// Strict, if set, causes an internal server error to be sent if the wrapped +// handler response fails response validation. If not set, the response is sent +// and the error is only logged. +func Strict(strict bool) ValidatorOption { + return func(v *Validator) { + v.strict = strict + } +} + +// ValidationOptions sets request/response validation options on the validator. +func ValidationOptions(options Options) ValidatorOption { + return func(v *Validator) { + v.options = options + } +} + +// Middleware returns an http.Handler which wraps the given handler with +// request and response validation. 
+func (v *Validator) Middleware(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + route, pathParams, err := v.router.FindRoute(r) + if err != nil { + v.logFunc("validation error: failed to find route for "+r.URL.String(), err) + v.errFunc(w, http.StatusNotFound, ErrCodeCannotFindRoute, err) + return + } + requestValidationInput := &RequestValidationInput{ + Request: r, + PathParams: pathParams, + Route: route, + Options: &v.options, + } + if err = ValidateRequest(r.Context(), requestValidationInput); err != nil { + v.logFunc("invalid request", err) + v.errFunc(w, http.StatusBadRequest, ErrCodeRequestInvalid, err) + return + } + + var wr responseWrapper + if v.strict { + wr = &strictResponseWrapper{w: w} + } else { + wr = newWarnResponseWrapper(w) + } + + h.ServeHTTP(wr, r) + + if err = ValidateResponse(r.Context(), &ResponseValidationInput{ + RequestValidationInput: requestValidationInput, + Status: wr.statusCode(), + Header: wr.Header(), + Body: ioutil.NopCloser(bytes.NewBuffer(wr.bodyContents())), + Options: &v.options, + }); err != nil { + v.logFunc("invalid response", err) + if v.strict { + v.errFunc(w, http.StatusInternalServerError, ErrCodeResponseInvalid, err) + } + return + } + + if err = wr.flushBodyContents(); err != nil { + v.logFunc("failed to write response", err) + } + }) +} + +type responseWrapper interface { + http.ResponseWriter + + // flushBodyContents writes the buffered response to the client, if it has + // not yet been written. + flushBodyContents() error + + // statusCode returns the response status code, 0 if not set yet. + statusCode() int + + // bodyContents returns the buffered + bodyContents() []byte +} + +type warnResponseWrapper struct { + w http.ResponseWriter + headerWritten bool + status int + body bytes.Buffer + tee io.Writer +} + +func newWarnResponseWrapper(w http.ResponseWriter) *warnResponseWrapper { + wr := &warnResponseWrapper{ + w: w, + } + wr.tee = io.MultiWriter(w, &wr.body) + return wr +} + +// Write implements http.ResponseWriter. +func (wr *warnResponseWrapper) Write(b []byte) (int, error) { + if !wr.headerWritten { + wr.WriteHeader(http.StatusOK) + } + return wr.tee.Write(b) +} + +// WriteHeader implements http.ResponseWriter. +func (wr *warnResponseWrapper) WriteHeader(status int) { + if !wr.headerWritten { + // If the header hasn't been written, record the status for response + // validation. + wr.status = status + wr.headerWritten = true + } + wr.w.WriteHeader(wr.status) +} + +// Header implements http.ResponseWriter. +func (wr *warnResponseWrapper) Header() http.Header { + return wr.w.Header() +} + +// Flush implements the optional http.Flusher interface. +func (wr *warnResponseWrapper) Flush() { + // If the wrapped http.ResponseWriter implements optional http.Flusher, + // pass through. + if fl, ok := wr.w.(http.Flusher); ok { + fl.Flush() + } +} + +func (wr *warnResponseWrapper) flushBodyContents() error { + return nil +} + +func (wr *warnResponseWrapper) statusCode() int { + return wr.status +} + +func (wr *warnResponseWrapper) bodyContents() []byte { + return wr.body.Bytes() +} + +type strictResponseWrapper struct { + w http.ResponseWriter + headerWritten bool + status int + body bytes.Buffer +} + +// Write implements http.ResponseWriter. +func (wr *strictResponseWrapper) Write(b []byte) (int, error) { + if !wr.headerWritten { + wr.WriteHeader(http.StatusOK) + } + return wr.body.Write(b) +} + +// WriteHeader implements http.ResponseWriter. 
+func (wr *strictResponseWrapper) WriteHeader(status int) { + if !wr.headerWritten { + wr.status = status + wr.headerWritten = true + } +} + +// Header implements http.ResponseWriter. +func (wr *strictResponseWrapper) Header() http.Header { + return wr.w.Header() +} + +func (wr *strictResponseWrapper) flushBodyContents() error { + wr.w.WriteHeader(wr.status) + _, err := wr.w.Write(wr.body.Bytes()) + return err +} + +func (wr *strictResponseWrapper) statusCode() int { + return wr.status +} + +func (wr *strictResponseWrapper) bodyContents() []byte { + return wr.body.Bytes() +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/options.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/options.go new file mode 100644 index 00000000..4bda60a2 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/options.go @@ -0,0 +1,44 @@ +package openapi3filter + +import "github.com/getkin/kin-openapi/openapi3" + +// Options used by ValidateRequest and ValidateResponse +type Options struct { + // Set ExcludeRequestBody so ValidateRequest skips request body validation + ExcludeRequestBody bool + + // Set ExcludeResponseBody so ValidateResponse skips response body validation + ExcludeResponseBody bool + + // Set ExcludeReadOnlyValidations so ValidateRequest skips read-only validations + ExcludeReadOnlyValidations bool + + // Set ExcludeWriteOnlyValidations so ValidateResponse skips write-only validations + ExcludeWriteOnlyValidations bool + + // Set IncludeResponseStatus so ValidateResponse fails on response + // status not defined in OpenAPI spec + IncludeResponseStatus bool + + MultiError bool + + // A document with security schemes defined will not pass validation + // unless an AuthenticationFunc is defined. + // See NoopAuthenticationFunc + AuthenticationFunc AuthenticationFunc + + // Indicates whether default values are set in the + // request. If true, then they are not set + SkipSettingDefaults bool + + customSchemaErrorFunc CustomSchemaErrorFunc +} + +// CustomSchemaErrorFunc allows for custom the schema error message. +type CustomSchemaErrorFunc func(err *openapi3.SchemaError) string + +// WithCustomSchemaErrorFunc sets a function to override the schema error message. +// If the passed function returns an empty string, it returns to the previous Error() implementation. +func (o *Options) WithCustomSchemaErrorFunc(f CustomSchemaErrorFunc) { + o.customSchemaErrorFunc = f +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_decoder.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_decoder.go new file mode 100644 index 00000000..fd28c2f3 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_decoder.go @@ -0,0 +1,1295 @@ +package openapi3filter + +import ( + "archive/zip" + "bytes" + "encoding/csv" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "mime" + "mime/multipart" + "net/http" + "net/url" + "regexp" + "strconv" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/getkin/kin-openapi/openapi3" +) + +// ParseErrorKind describes a kind of ParseError. +// The type simplifies comparison of errors. +type ParseErrorKind int + +const ( + // KindOther describes an untyped parsing error. + KindOther ParseErrorKind = iota + // KindUnsupportedFormat describes an error that happens when a value has an unsupported format. 
+ KindUnsupportedFormat + // KindInvalidFormat describes an error that happens when a value does not conform a format + // that is required by a serialization method. + KindInvalidFormat +) + +// ParseError describes errors which happens while parse operation's parameters, requestBody, or response. +type ParseError struct { + Kind ParseErrorKind + Value interface{} + Reason string + Cause error + + path []interface{} +} + +var _ interface{ Unwrap() error } = ParseError{} + +func (e *ParseError) Error() string { + var msg []string + if p := e.Path(); len(p) > 0 { + var arr []string + for _, v := range p { + arr = append(arr, fmt.Sprintf("%v", v)) + } + msg = append(msg, fmt.Sprintf("path %v", strings.Join(arr, "."))) + } + msg = append(msg, e.innerError()) + return strings.Join(msg, ": ") +} + +func (e *ParseError) innerError() string { + var msg []string + if e.Value != nil { + msg = append(msg, fmt.Sprintf("value %v", e.Value)) + } + if e.Reason != "" { + msg = append(msg, e.Reason) + } + if e.Cause != nil { + if v, ok := e.Cause.(*ParseError); ok { + msg = append(msg, v.innerError()) + } else { + msg = append(msg, e.Cause.Error()) + } + } + return strings.Join(msg, ": ") +} + +// RootCause returns a root cause of ParseError. +func (e *ParseError) RootCause() error { + if v, ok := e.Cause.(*ParseError); ok { + return v.RootCause() + } + return e.Cause +} + +func (e ParseError) Unwrap() error { + return e.Cause +} + +// Path returns a path to the root cause. +func (e *ParseError) Path() []interface{} { + var path []interface{} + if v, ok := e.Cause.(*ParseError); ok { + p := v.Path() + if len(p) > 0 { + path = append(path, p...) + } + } + if len(e.path) > 0 { + path = append(path, e.path...) + } + return path +} + +func invalidSerializationMethodErr(sm *openapi3.SerializationMethod) error { + return fmt.Errorf("invalid serialization method: style=%q, explode=%v", sm.Style, sm.Explode) +} + +// Decodes a parameter defined via the content property as an object. 
It uses +// the user specified decoder, or our build-in decoder for application/json +func decodeContentParameter(param *openapi3.Parameter, input *RequestValidationInput) ( + value interface{}, + schema *openapi3.Schema, + found bool, + err error, +) { + var paramValues []string + switch param.In { + case openapi3.ParameterInPath: + var paramValue string + if paramValue, found = input.PathParams[param.Name]; found { + paramValues = []string{paramValue} + } + case openapi3.ParameterInQuery: + paramValues, found = input.GetQueryParams()[param.Name] + case openapi3.ParameterInHeader: + var headerValues []string + if headerValues, found = input.Request.Header[http.CanonicalHeaderKey(param.Name)]; found { + paramValues = headerValues + } + case openapi3.ParameterInCookie: + var cookie *http.Cookie + if cookie, err = input.Request.Cookie(param.Name); err == http.ErrNoCookie { + found = false + } else if err != nil { + return + } else { + paramValues = []string{cookie.Value} + found = true + } + default: + err = fmt.Errorf("unsupported parameter.in: %q", param.In) + return + } + + if !found { + if param.Required { + err = fmt.Errorf("parameter %q is required, but missing", param.Name) + } + return + } + + decoder := input.ParamDecoder + if decoder == nil { + decoder = defaultContentParameterDecoder + } + + value, schema, err = decoder(param, paramValues) + return +} + +func defaultContentParameterDecoder(param *openapi3.Parameter, values []string) ( + outValue interface{}, + outSchema *openapi3.Schema, + err error, +) { + // Only query parameters can have multiple values. + if len(values) > 1 && param.In != openapi3.ParameterInQuery { + err = fmt.Errorf("%s parameter %q cannot have multiple values", param.In, param.Name) + return + } + + content := param.Content + if content == nil { + err = fmt.Errorf("parameter %q expected to have content", param.Name) + return + } + // We only know how to decode a parameter if it has one content, application/json + if len(content) != 1 { + err = fmt.Errorf("multiple content types for parameter %q", param.Name) + return + } + + mt := content.Get("application/json") + if mt == nil { + err = fmt.Errorf("parameter %q has no content schema", param.Name) + return + } + outSchema = mt.Schema.Value + + unmarshal := func(encoded string, paramSchema *openapi3.SchemaRef) (decoded interface{}, err error) { + if err = json.Unmarshal([]byte(encoded), &decoded); err != nil { + if paramSchema != nil && paramSchema.Value.Type != "object" { + decoded, err = encoded, nil + } + } + return + } + + if len(values) == 1 { + if outValue, err = unmarshal(values[0], mt.Schema); err != nil { + err = fmt.Errorf("error unmarshaling parameter %q", param.Name) + return + } + } else { + outArray := make([]interface{}, 0, len(values)) + for _, v := range values { + var item interface{} + if item, err = unmarshal(v, outSchema.Items); err != nil { + err = fmt.Errorf("error unmarshaling parameter %q", param.Name) + return + } + outArray = append(outArray, item) + } + outValue = outArray + } + return +} + +type valueDecoder interface { + DecodePrimitive(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) + DecodeArray(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) ([]interface{}, bool, error) + DecodeObject(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (map[string]interface{}, bool, error) +} + +// decodeStyledParameter returns a value of an operation's parameter from HTTP request for +// 
parameters defined using the style format, and whether the parameter is supplied in the input. +// The function returns ParseError when HTTP request contains an invalid value of a parameter. +func decodeStyledParameter(param *openapi3.Parameter, input *RequestValidationInput) (interface{}, bool, error) { + sm, err := param.SerializationMethod() + if err != nil { + return nil, false, err + } + + var dec valueDecoder + switch param.In { + case openapi3.ParameterInPath: + if len(input.PathParams) == 0 { + return nil, false, nil + } + dec = &pathParamDecoder{pathParams: input.PathParams} + case openapi3.ParameterInQuery: + if len(input.GetQueryParams()) == 0 { + return nil, false, nil + } + dec = &urlValuesDecoder{values: input.GetQueryParams()} + case openapi3.ParameterInHeader: + dec = &headerParamDecoder{header: input.Request.Header} + case openapi3.ParameterInCookie: + dec = &cookieParamDecoder{req: input.Request} + default: + return nil, false, fmt.Errorf("unsupported parameter's 'in': %s", param.In) + } + + return decodeValue(dec, param.Name, sm, param.Schema, param.Required) +} + +func decodeValue(dec valueDecoder, param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef, required bool) (interface{}, bool, error) { + var found bool + + if len(schema.Value.AllOf) > 0 { + var value interface{} + var err error + for _, sr := range schema.Value.AllOf { + var f bool + value, f, err = decodeValue(dec, param, sm, sr, required) + found = found || f + if value == nil || err != nil { + break + } + } + return value, found, err + } + + if len(schema.Value.AnyOf) > 0 { + for _, sr := range schema.Value.AnyOf { + value, f, _ := decodeValue(dec, param, sm, sr, required) + found = found || f + if value != nil { + return value, found, nil + } + } + if required { + return nil, found, fmt.Errorf("decoding anyOf for parameter %q failed", param) + } + return nil, found, nil + } + + if len(schema.Value.OneOf) > 0 { + isMatched := 0 + var value interface{} + for _, sr := range schema.Value.OneOf { + v, f, _ := decodeValue(dec, param, sm, sr, required) + found = found || f + if v != nil { + value = v + isMatched++ + } + } + if isMatched >= 1 { + return value, found, nil + } + if required { + return nil, found, fmt.Errorf("decoding oneOf failed: %q is required", param) + } + return nil, found, nil + } + + if schema.Value.Not != nil { + // TODO(decode not): handle decoding "not" JSON Schema + return nil, found, errors.New("not implemented: decoding 'not'") + } + + if schema.Value.Type != "" { + var decodeFn func(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) + switch schema.Value.Type { + case "array": + decodeFn = func(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) { + return dec.DecodeArray(param, sm, schema) + } + case "object": + decodeFn = func(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) { + return dec.DecodeObject(param, sm, schema) + } + default: + decodeFn = dec.DecodePrimitive + } + return decodeFn(param, sm, schema) + } + switch vDecoder := dec.(type) { + case *pathParamDecoder: + _, found = vDecoder.pathParams[param] + case *urlValuesDecoder: + if schema.Value.Pattern != "" { + return dec.DecodePrimitive(param, sm, schema) + } + _, found = vDecoder.values[param] + case *headerParamDecoder: + _, found = vDecoder.header[http.CanonicalHeaderKey(param)] + case *cookieParamDecoder: + _, err := vDecoder.req.Cookie(param) 
+ found = err != http.ErrNoCookie + default: + return nil, found, errors.New("unsupported decoder") + } + return nil, found, nil +} + +// pathParamDecoder decodes values of path parameters. +type pathParamDecoder struct { + pathParams map[string]string +} + +func (d *pathParamDecoder) DecodePrimitive(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) { + var prefix string + switch sm.Style { + case "simple": + // A prefix is empty for style "simple". + case "label": + prefix = "." + case "matrix": + prefix = ";" + param + "=" + default: + return nil, false, invalidSerializationMethodErr(sm) + } + + if d.pathParams == nil { + // HTTP request does not contains a value of the target path parameter. + return nil, false, nil + } + raw, ok := d.pathParams[param] + if !ok || raw == "" { + // HTTP request does not contains a value of the target path parameter. + return nil, false, nil + } + src, err := cutPrefix(raw, prefix) + if err != nil { + return nil, ok, err + } + val, err := parsePrimitive(src, schema) + return val, ok, err +} + +func (d *pathParamDecoder) DecodeArray(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) ([]interface{}, bool, error) { + var prefix, delim string + switch { + case sm.Style == "simple": + delim = "," + case sm.Style == "label" && !sm.Explode: + prefix = "." + delim = "," + case sm.Style == "label" && sm.Explode: + prefix = "." + delim = "." + case sm.Style == "matrix" && !sm.Explode: + prefix = ";" + param + "=" + delim = "," + case sm.Style == "matrix" && sm.Explode: + prefix = ";" + param + "=" + delim = ";" + param + "=" + default: + return nil, false, invalidSerializationMethodErr(sm) + } + + if d.pathParams == nil { + // HTTP request does not contains a value of the target path parameter. + return nil, false, nil + } + raw, ok := d.pathParams[param] + if !ok || raw == "" { + // HTTP request does not contains a value of the target path parameter. + return nil, false, nil + } + src, err := cutPrefix(raw, prefix) + if err != nil { + return nil, ok, err + } + val, err := parseArray(strings.Split(src, delim), schema) + return val, ok, err +} + +func (d *pathParamDecoder) DecodeObject(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (map[string]interface{}, bool, error) { + var prefix, propsDelim, valueDelim string + switch { + case sm.Style == "simple" && !sm.Explode: + propsDelim = "," + valueDelim = "," + case sm.Style == "simple" && sm.Explode: + propsDelim = "," + valueDelim = "=" + case sm.Style == "label" && !sm.Explode: + prefix = "." + propsDelim = "," + valueDelim = "," + case sm.Style == "label" && sm.Explode: + prefix = "." + propsDelim = "." + valueDelim = "=" + case sm.Style == "matrix" && !sm.Explode: + prefix = ";" + param + "=" + propsDelim = "," + valueDelim = "," + case sm.Style == "matrix" && sm.Explode: + prefix = ";" + propsDelim = ";" + valueDelim = "=" + default: + return nil, false, invalidSerializationMethodErr(sm) + } + + if d.pathParams == nil { + // HTTP request does not contains a value of the target path parameter. + return nil, false, nil + } + raw, ok := d.pathParams[param] + if !ok || raw == "" { + // HTTP request does not contains a value of the target path parameter. 
+ return nil, false, nil + } + src, err := cutPrefix(raw, prefix) + if err != nil { + return nil, ok, err + } + props, err := propsFromString(src, propsDelim, valueDelim) + if err != nil { + return nil, ok, err + } + val, err := makeObject(props, schema) + return val, ok, err +} + +// cutPrefix validates that a raw value of a path parameter has the specified prefix, +// and returns a raw value without the prefix. +func cutPrefix(raw, prefix string) (string, error) { + if prefix == "" { + return raw, nil + } + if len(raw) < len(prefix) || raw[:len(prefix)] != prefix { + return "", &ParseError{ + Kind: KindInvalidFormat, + Value: raw, + Reason: fmt.Sprintf("a value must be prefixed with %q", prefix), + } + } + return raw[len(prefix):], nil +} + +// urlValuesDecoder decodes values of query parameters. +type urlValuesDecoder struct { + values url.Values +} + +func (d *urlValuesDecoder) DecodePrimitive(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) { + if sm.Style != "form" { + return nil, false, invalidSerializationMethodErr(sm) + } + + values, ok := d.values[param] + if len(values) == 0 { + // HTTP request does not contain a value of the target query parameter. + return nil, ok, nil + } + + if schema.Value.Type == "" && schema.Value.Pattern != "" { + return values[0], ok, nil + } + val, err := parsePrimitive(values[0], schema) + return val, ok, err +} + +func (d *urlValuesDecoder) DecodeArray(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) ([]interface{}, bool, error) { + if sm.Style == "deepObject" { + return nil, false, invalidSerializationMethodErr(sm) + } + + values, ok := d.values[param] + if len(values) == 0 { + // HTTP request does not contain a value of the target query parameter. + return nil, ok, nil + } + if !sm.Explode { + var delim string + switch sm.Style { + case "form": + delim = "," + case "spaceDelimited": + delim = " " + case "pipeDelimited": + delim = "|" + } + values = strings.Split(values[0], delim) + } + val, err := d.parseArray(values, sm, schema) + return val, ok, err +} + +// parseArray returns an array that contains items from a raw array. +// Every item is parsed as a primitive value. +// The function returns an error when an error happened while parse array's items. +func (d *urlValuesDecoder) parseArray(raw []string, sm *openapi3.SerializationMethod, schemaRef *openapi3.SchemaRef) ([]interface{}, error) { + var value []interface{} + + for i, v := range raw { + item, err := d.parseValue(v, schemaRef.Value.Items) + if err != nil { + if v, ok := err.(*ParseError); ok { + return nil, &ParseError{path: []interface{}{i}, Cause: v} + } + return nil, fmt.Errorf("item %d: %w", i, err) + } + + // If the items are nil, then the array is nil. There shouldn't be case where some values are actual primitive + // values and some are nil values. 
+ if item == nil { + return nil, nil + } + value = append(value, item) + } + return value, nil +} + +func (d *urlValuesDecoder) parseValue(v string, schema *openapi3.SchemaRef) (interface{}, error) { + if len(schema.Value.AllOf) > 0 { + var value interface{} + var err error + for _, sr := range schema.Value.AllOf { + value, err = d.parseValue(v, sr) + if value == nil || err != nil { + break + } + } + return value, err + } + + if len(schema.Value.AnyOf) > 0 { + var value interface{} + var err error + for _, sr := range schema.Value.AnyOf { + if value, err = d.parseValue(v, sr); err == nil { + return value, nil + } + } + + return nil, err + } + + if len(schema.Value.OneOf) > 0 { + isMatched := 0 + var value interface{} + var err error + for _, sr := range schema.Value.OneOf { + result, err := d.parseValue(v, sr) + if err == nil { + value = result + isMatched++ + } + } + if isMatched == 1 { + return value, nil + } else if isMatched > 1 { + return nil, fmt.Errorf("decoding oneOf failed: %d schemas matched", isMatched) + } else if isMatched == 0 { + return nil, fmt.Errorf("decoding oneOf failed: %d schemas matched", isMatched) + } + + return nil, err + } + + if schema.Value.Not != nil { + // TODO(decode not): handle decoding "not" JSON Schema + return nil, errors.New("not implemented: decoding 'not'") + } + + return parsePrimitive(v, schema) + +} + +func (d *urlValuesDecoder) DecodeObject(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (map[string]interface{}, bool, error) { + var propsFn func(url.Values) (map[string]string, error) + switch sm.Style { + case "form": + propsFn = func(params url.Values) (map[string]string, error) { + if len(params) == 0 { + // HTTP request does not contain query parameters. + return nil, nil + } + if sm.Explode { + props := make(map[string]string) + for key, values := range params { + props[key] = values[0] + } + return props, nil + } + values := params[param] + if len(values) == 0 { + // HTTP request does not contain a value of the target query parameter. + return nil, nil + } + return propsFromString(values[0], ",", ",") + } + case "deepObject": + propsFn = func(params url.Values) (map[string]string, error) { + props := make(map[string]string) + for key, values := range params { + groups := regexp.MustCompile(fmt.Sprintf("%s\\[(.+?)\\]", param)).FindAllStringSubmatch(key, -1) + if len(groups) == 0 { + // A query parameter's name does not match the required format, so skip it. + continue + } + props[groups[0][1]] = values[0] + } + if len(props) == 0 { + // HTTP request does not contain query parameters encoded by rules of style "deepObject". + return nil, nil + } + return props, nil + } + default: + return nil, false, invalidSerializationMethodErr(sm) + } + + props, err := propsFn(d.values) + if err != nil { + return nil, false, err + } + if props == nil { + return nil, false, nil + } + + // check the props + found := false + for propName := range schema.Value.Properties { + if _, ok := props[propName]; ok { + found = true + break + } + } + val, err := makeObject(props, schema) + return val, found, err +} + +// headerParamDecoder decodes values of header parameters. 
+type headerParamDecoder struct { + header http.Header +} + +func (d *headerParamDecoder) DecodePrimitive(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) { + if sm.Style != "simple" { + return nil, false, invalidSerializationMethodErr(sm) + } + + raw, ok := d.header[http.CanonicalHeaderKey(param)] + if !ok || len(raw) == 0 { + // HTTP request does not contains a corresponding header or has the empty value + return nil, ok, nil + } + + val, err := parsePrimitive(raw[0], schema) + return val, ok, err +} + +func (d *headerParamDecoder) DecodeArray(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) ([]interface{}, bool, error) { + if sm.Style != "simple" { + return nil, false, invalidSerializationMethodErr(sm) + } + + raw, ok := d.header[http.CanonicalHeaderKey(param)] + if !ok || len(raw) == 0 { + // HTTP request does not contains a corresponding header + return nil, ok, nil + } + + val, err := parseArray(strings.Split(raw[0], ","), schema) + return val, ok, err +} + +func (d *headerParamDecoder) DecodeObject(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (map[string]interface{}, bool, error) { + if sm.Style != "simple" { + return nil, false, invalidSerializationMethodErr(sm) + } + valueDelim := "," + if sm.Explode { + valueDelim = "=" + } + + raw, ok := d.header[http.CanonicalHeaderKey(param)] + if !ok || len(raw) == 0 { + // HTTP request does not contain a corresponding header. + return nil, ok, nil + } + props, err := propsFromString(raw[0], ",", valueDelim) + if err != nil { + return nil, ok, err + } + val, err := makeObject(props, schema) + return val, ok, err +} + +// cookieParamDecoder decodes values of cookie parameters. +type cookieParamDecoder struct { + req *http.Request +} + +func (d *cookieParamDecoder) DecodePrimitive(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (interface{}, bool, error) { + if sm.Style != "form" { + return nil, false, invalidSerializationMethodErr(sm) + } + + cookie, err := d.req.Cookie(param) + found := err != http.ErrNoCookie + if !found { + // HTTP request does not contain a corresponding cookie. + return nil, found, nil + } + if err != nil { + return nil, found, fmt.Errorf("decoding param %q: %w", param, err) + } + + val, err := parsePrimitive(cookie.Value, schema) + return val, found, err +} + +func (d *cookieParamDecoder) DecodeArray(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) ([]interface{}, bool, error) { + if sm.Style != "form" || sm.Explode { + return nil, false, invalidSerializationMethodErr(sm) + } + + cookie, err := d.req.Cookie(param) + found := err != http.ErrNoCookie + if !found { + // HTTP request does not contain a corresponding cookie. + return nil, found, nil + } + if err != nil { + return nil, found, fmt.Errorf("decoding param %q: %w", param, err) + } + val, err := parseArray(strings.Split(cookie.Value, ","), schema) + return val, found, err +} + +func (d *cookieParamDecoder) DecodeObject(param string, sm *openapi3.SerializationMethod, schema *openapi3.SchemaRef) (map[string]interface{}, bool, error) { + if sm.Style != "form" || sm.Explode { + return nil, false, invalidSerializationMethodErr(sm) + } + + cookie, err := d.req.Cookie(param) + found := err != http.ErrNoCookie + if !found { + // HTTP request does not contain a corresponding cookie. 
+ return nil, found, nil
+ }
+ if err != nil {
+ return nil, found, fmt.Errorf("decoding param %q: %w", param, err)
+ }
+ props, err := propsFromString(cookie.Value, ",", ",")
+ if err != nil {
+ return nil, found, err
+ }
+ val, err := makeObject(props, schema)
+ return val, found, err
+}
+
+// propsFromString returns a properties map that is created by splitting a source string by propDelim and valueDelim.
+// The source string must have a valid format: pairs separated by propDelim, with the name and value in each pair separated by valueDelim.
+// The function returns an error when the source string has an invalid format.
+func propsFromString(src, propDelim, valueDelim string) (map[string]string, error) {
+ props := make(map[string]string)
+ pairs := strings.Split(src, propDelim)
+
+ // When propDelim and valueDelim are equal, the source string follows the next rule:
+ // every even item of pairs is a property's name, and the subsequent odd item is a property's value.
+ if propDelim == valueDelim {
+ // Taking into account the rule above, a valid source string must be split by propDelim
+ // to an array with an even number of items.
+ if len(pairs)%2 != 0 {
+ return nil, &ParseError{
+ Kind: KindInvalidFormat,
+ Value: src,
+ Reason: fmt.Sprintf("a value must be a list of object's properties in format \"name%svalue\" separated by %s", valueDelim, propDelim),
+ }
+ }
+ for i := 0; i < len(pairs)/2; i++ {
+ props[pairs[i*2]] = pairs[i*2+1]
+ }
+ return props, nil
+ }
+
+ // When propDelim and valueDelim are not equal, the source string follows the next rule:
+ // every item of pairs is a single name/value pair separated by valueDelim.
+ for _, pair := range pairs {
+ prop := strings.Split(pair, valueDelim)
+ if len(prop) != 2 {
+ return nil, &ParseError{
+ Kind: KindInvalidFormat,
+ Value: src,
+ Reason: fmt.Sprintf("a value must be a list of object's properties in format \"name%svalue\" separated by %s", valueDelim, propDelim),
+ }
+ }
+ props[prop[0]] = prop[1]
+ }
+ return props, nil
+}
+
+// makeObject returns an object that contains properties from props.
+// A value of every property is parsed as a primitive value.
+// The function returns an error when an error happened while parse object's properties.
+func makeObject(props map[string]string, schema *openapi3.SchemaRef) (map[string]interface{}, error) {
+ obj := make(map[string]interface{})
+ for propName, propSchema := range schema.Value.Properties {
+ value, err := parsePrimitive(props[propName], propSchema)
+ if err != nil {
+ if v, ok := err.(*ParseError); ok {
+ return nil, &ParseError{path: []interface{}{propName}, Cause: v}
+ }
+ return nil, fmt.Errorf("property %q: %w", propName, err)
+ }
+ obj[propName] = value
+ }
+ return obj, nil
+}
+
+// parseArray returns an array that contains items from a raw array.
+// Every item is parsed as a primitive value.
+// The function returns an error when an error happened while parse array's items.
+func parseArray(raw []string, schemaRef *openapi3.SchemaRef) ([]interface{}, error) {
+ var value []interface{}
+ for i, v := range raw {
+ item, err := parsePrimitive(v, schemaRef.Value.Items)
+ if err != nil {
+ if v, ok := err.(*ParseError); ok {
+ return nil, &ParseError{path: []interface{}{i}, Cause: v}
+ }
+ return nil, fmt.Errorf("item %d: %w", i, err)
+ }
+
+ // If the items are nil, then the array is nil. There shouldn't be case where some values are actual primitive
+ // values and some are nil values.
+ if item == nil { + return nil, nil + } + value = append(value, item) + } + return value, nil +} + +// parsePrimitive returns a value that is created by parsing a source string to a primitive type +// that is specified by a schema. The function returns nil when the source string is empty. +// The function panics when a schema has a non-primitive type. +func parsePrimitive(raw string, schema *openapi3.SchemaRef) (interface{}, error) { + if raw == "" { + return nil, nil + } + switch schema.Value.Type { + case "integer": + if schema.Value.Format == "int32" { + v, err := strconv.ParseInt(raw, 0, 32) + if err != nil { + return nil, &ParseError{Kind: KindInvalidFormat, Value: raw, Reason: "an invalid " + schema.Value.Type, Cause: err.(*strconv.NumError).Err} + } + return int32(v), nil + } + v, err := strconv.ParseInt(raw, 0, 64) + if err != nil { + return nil, &ParseError{Kind: KindInvalidFormat, Value: raw, Reason: "an invalid " + schema.Value.Type, Cause: err.(*strconv.NumError).Err} + } + return v, nil + case "number": + v, err := strconv.ParseFloat(raw, 64) + if err != nil { + return nil, &ParseError{Kind: KindInvalidFormat, Value: raw, Reason: "an invalid " + schema.Value.Type, Cause: err.(*strconv.NumError).Err} + } + return v, nil + case "boolean": + v, err := strconv.ParseBool(raw) + if err != nil { + return nil, &ParseError{Kind: KindInvalidFormat, Value: raw, Reason: "an invalid " + schema.Value.Type, Cause: err.(*strconv.NumError).Err} + } + return v, nil + case "string": + return raw, nil + default: + panic(fmt.Sprintf("schema has non primitive type %q", schema.Value.Type)) + } +} + +// EncodingFn is a function that returns an encoding of a request body's part. +type EncodingFn func(partName string) *openapi3.Encoding + +// BodyDecoder is an interface to decode a body of a request or response. +// An implementation must return a value that is a primitive, []interface{}, or map[string]interface{}. +type BodyDecoder func(io.Reader, http.Header, *openapi3.SchemaRef, EncodingFn) (interface{}, error) + +// bodyDecoders contains decoders for supported content types of a body. +// By default, there is content type "application/json" is supported only. +var bodyDecoders = make(map[string]BodyDecoder) + +// RegisteredBodyDecoder returns the registered body decoder for the given content type. +// +// If no decoder was registered for the given content type, nil is returned. +// This call is not thread-safe: body decoders should not be created/destroyed by multiple goroutines. +func RegisteredBodyDecoder(contentType string) BodyDecoder { + return bodyDecoders[contentType] +} + +// RegisterBodyDecoder registers a request body's decoder for a content type. +// +// If a decoder for the specified content type already exists, the function replaces +// it with the specified decoder. +// This call is not thread-safe: body decoders should not be created/destroyed by multiple goroutines. +func RegisterBodyDecoder(contentType string, decoder BodyDecoder) { + if contentType == "" { + panic("contentType is empty") + } + if decoder == nil { + panic("decoder is not defined") + } + bodyDecoders[contentType] = decoder +} + +// UnregisterBodyDecoder dissociates a body decoder from a content type. +// +// Decoding this content type will result in an error. +// This call is not thread-safe: body decoders should not be created/destroyed by multiple goroutines. 
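// Illustrative sketch (not part of the vendored file): registering, and later removing,
// a body decoder for a content type the library does not support out of the box.
// The "application/toml" media type and the pass-through decoder are assumptions made
// for this example; callers outside this package reach these helpers as
// openapi3filter.RegisterBodyDecoder / openapi3filter.UnregisterBodyDecoder.
func exampleRegisterCustomDecoder() {
	RegisterBodyDecoder("application/toml", func(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) {
		// A real decoder would parse the payload; this sketch returns the raw text.
		data, err := ioutil.ReadAll(body)
		if err != nil {
			return nil, &ParseError{Kind: KindInvalidFormat, Cause: err}
		}
		return string(data), nil
	})

	// Decoding "application/toml" bodies will fail again after this call.
	UnregisterBodyDecoder("application/toml")
}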
+func UnregisterBodyDecoder(contentType string) { + if contentType == "" { + panic("contentType is empty") + } + delete(bodyDecoders, contentType) +} + +var headerCT = http.CanonicalHeaderKey("Content-Type") + +const prefixUnsupportedCT = "unsupported content type" + +// decodeBody returns a decoded body. +// The function returns ParseError when a body is invalid. +func decodeBody(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) ( + string, + interface{}, + error, +) { + contentType := header.Get(headerCT) + if contentType == "" { + if _, ok := body.(*multipart.Part); ok { + contentType = "text/plain" + } + } + mediaType := parseMediaType(contentType) + decoder, ok := bodyDecoders[mediaType] + if !ok { + return "", nil, &ParseError{ + Kind: KindUnsupportedFormat, + Reason: fmt.Sprintf("%s %q", prefixUnsupportedCT, mediaType), + } + } + value, err := decoder(body, header, schema, encFn) + if err != nil { + return "", nil, err + } + return mediaType, value, nil +} + +func init() { + RegisterBodyDecoder("application/json", jsonBodyDecoder) + RegisterBodyDecoder("application/json-patch+json", jsonBodyDecoder) + RegisterBodyDecoder("application/octet-stream", FileBodyDecoder) + RegisterBodyDecoder("application/problem+json", jsonBodyDecoder) + RegisterBodyDecoder("application/x-www-form-urlencoded", urlencodedBodyDecoder) + RegisterBodyDecoder("application/x-yaml", yamlBodyDecoder) + RegisterBodyDecoder("application/yaml", yamlBodyDecoder) + RegisterBodyDecoder("application/zip", zipFileBodyDecoder) + RegisterBodyDecoder("multipart/form-data", multipartBodyDecoder) + RegisterBodyDecoder("text/csv", csvBodyDecoder) + RegisterBodyDecoder("text/plain", plainBodyDecoder) +} + +func plainBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + data, err := ioutil.ReadAll(body) + if err != nil { + return nil, &ParseError{Kind: KindInvalidFormat, Cause: err} + } + return string(data), nil +} + +func jsonBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + var value interface{} + dec := json.NewDecoder(body) + dec.UseNumber() + if err := dec.Decode(&value); err != nil { + return nil, &ParseError{Kind: KindInvalidFormat, Cause: err} + } + return value, nil +} + +func yamlBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + var value interface{} + if err := yaml.NewDecoder(body).Decode(&value); err != nil { + return nil, &ParseError{Kind: KindInvalidFormat, Cause: err} + } + return value, nil +} + +func urlencodedBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + // Validate schema of request body. + // By the OpenAPI 3 specification request body's schema must have type "object". + // Properties of the schema describes individual parts of request body. 
+ if schema.Value.Type != "object" { + return nil, errors.New("unsupported schema of request body") + } + for propName, propSchema := range schema.Value.Properties { + switch propSchema.Value.Type { + case "object": + return nil, fmt.Errorf("unsupported schema of request body's property %q", propName) + case "array": + items := propSchema.Value.Items.Value + if items.Type != "string" && items.Type != "integer" && items.Type != "number" && items.Type != "boolean" { + return nil, fmt.Errorf("unsupported schema of request body's property %q", propName) + } + } + } + + // Parse form. + b, err := ioutil.ReadAll(body) + if err != nil { + return nil, err + } + values, err := url.ParseQuery(string(b)) + if err != nil { + return nil, err + } + + // Make an object value from form values. + obj := make(map[string]interface{}) + dec := &urlValuesDecoder{values: values} + for name, prop := range schema.Value.Properties { + var ( + value interface{} + enc *openapi3.Encoding + ) + if encFn != nil { + enc = encFn(name) + } + sm := enc.SerializationMethod() + + if value, _, err = decodeValue(dec, name, sm, prop, false); err != nil { + return nil, err + } + obj[name] = value + } + + return obj, nil +} + +func multipartBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + if schema.Value.Type != "object" { + return nil, errors.New("unsupported schema of request body") + } + + // Parse form. + values := make(map[string][]interface{}) + contentType := header.Get(headerCT) + _, params, err := mime.ParseMediaType(contentType) + if err != nil { + return nil, err + } + mr := multipart.NewReader(body, params["boundary"]) + for { + var part *multipart.Part + if part, err = mr.NextPart(); err == io.EOF { + break + } + if err != nil { + return nil, err + } + + var ( + name = part.FormName() + enc *openapi3.Encoding + ) + if encFn != nil { + enc = encFn(name) + } + subEncFn := func(string) *openapi3.Encoding { return enc } + + var valueSchema *openapi3.SchemaRef + if len(schema.Value.AllOf) > 0 { + var exists bool + for _, sr := range schema.Value.AllOf { + if valueSchema, exists = sr.Value.Properties[name]; exists { + break + } + } + if !exists { + return nil, &ParseError{Kind: KindOther, Cause: fmt.Errorf("part %s: undefined", name)} + } + } else { + // If the property's schema has type "array" it is means that the form contains a few parts with the same name. + // Every such part has a type that is defined by an items schema in the property's schema. 
+ var exists bool + if valueSchema, exists = schema.Value.Properties[name]; !exists { + if anyProperties := schema.Value.AdditionalProperties.Has; anyProperties != nil { + switch *anyProperties { + case true: + //additionalProperties: true + continue + default: + //additionalProperties: false + return nil, &ParseError{Kind: KindOther, Cause: fmt.Errorf("part %s: undefined", name)} + } + } + if schema.Value.AdditionalProperties.Schema == nil { + return nil, &ParseError{Kind: KindOther, Cause: fmt.Errorf("part %s: undefined", name)} + } + if valueSchema, exists = schema.Value.AdditionalProperties.Schema.Value.Properties[name]; !exists { + return nil, &ParseError{Kind: KindOther, Cause: fmt.Errorf("part %s: undefined", name)} + } + } + if valueSchema.Value.Type == "array" { + valueSchema = valueSchema.Value.Items + } + } + + var value interface{} + if _, value, err = decodeBody(part, http.Header(part.Header), valueSchema, subEncFn); err != nil { + if v, ok := err.(*ParseError); ok { + return nil, &ParseError{path: []interface{}{name}, Cause: v} + } + return nil, fmt.Errorf("part %s: %w", name, err) + } + values[name] = append(values[name], value) + } + + allTheProperties := make(map[string]*openapi3.SchemaRef) + if len(schema.Value.AllOf) > 0 { + for _, sr := range schema.Value.AllOf { + for k, v := range sr.Value.Properties { + allTheProperties[k] = v + } + if addProps := sr.Value.AdditionalProperties.Schema; addProps != nil { + for k, v := range addProps.Value.Properties { + allTheProperties[k] = v + } + } + } + } else { + for k, v := range schema.Value.Properties { + allTheProperties[k] = v + } + if addProps := schema.Value.AdditionalProperties.Schema; addProps != nil { + for k, v := range addProps.Value.Properties { + allTheProperties[k] = v + } + } + } + + // Make an object value from form values. + obj := make(map[string]interface{}) + for name, prop := range allTheProperties { + vv := values[name] + if len(vv) == 0 { + continue + } + if prop.Value.Type == "array" { + obj[name] = vv + } else { + obj[name] = vv[0] + } + } + + return obj, nil +} + +// FileBodyDecoder is a body decoder that decodes a file body to a string. +func FileBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + data, err := ioutil.ReadAll(body) + if err != nil { + return nil, err + } + return string(data), nil +} + +// zipFileBodyDecoder is a body decoder that decodes a zip file body to a string. +func zipFileBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + buff := bytes.NewBuffer([]byte{}) + size, err := io.Copy(buff, body) + if err != nil { + return nil, err + } + + zr, err := zip.NewReader(bytes.NewReader(buff.Bytes()), size) + if err != nil { + return nil, err + } + + const bufferSize = 256 + content := make([]byte, 0, bufferSize*len(zr.File)) + buffer := make([]byte /*0,*/, bufferSize) + + for _, f := range zr.File { + err := func() error { + rc, err := f.Open() + if err != nil { + return err + } + defer func() { + _ = rc.Close() + }() + + for { + n, err := rc.Read(buffer) + if 0 < n { + content = append(content, buffer...) + } + if err == io.EOF { + break + } + if err != nil { + return err + } + } + + return nil + }() + + if err != nil { + return nil, err + } + } + + return string(content), nil +} + +// csvBodyDecoder is a body decoder that decodes a csv body to a string. 
+func csvBodyDecoder(body io.Reader, header http.Header, schema *openapi3.SchemaRef, encFn EncodingFn) (interface{}, error) { + r := csv.NewReader(body) + + var content string + for { + record, err := r.Read() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + content += strings.Join(record, ",") + "\n" + } + + return content, nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_encoder.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_encoder.go new file mode 100644 index 00000000..36b7db6f --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/req_resp_encoder.go @@ -0,0 +1,49 @@ +package openapi3filter + +import ( + "encoding/json" + "fmt" +) + +func encodeBody(body interface{}, mediaType string) ([]byte, error) { + encoder, ok := bodyEncoders[mediaType] + if !ok { + return nil, &ParseError{ + Kind: KindUnsupportedFormat, + Reason: fmt.Sprintf("%s %q", prefixUnsupportedCT, mediaType), + } + } + return encoder(body) +} + +type BodyEncoder func(body interface{}) ([]byte, error) + +var bodyEncoders = map[string]BodyEncoder{ + "application/json": json.Marshal, +} + +func RegisterBodyEncoder(contentType string, encoder BodyEncoder) { + if contentType == "" { + panic("contentType is empty") + } + if encoder == nil { + panic("encoder is not defined") + } + bodyEncoders[contentType] = encoder +} + +// This call is not thread-safe: body encoders should not be created/destroyed by multiple goroutines. +func UnregisterBodyEncoder(contentType string) { + if contentType == "" { + panic("contentType is empty") + } + delete(bodyEncoders, contentType) +} + +// RegisteredBodyEncoder returns the registered body encoder for the given content type. +// +// If no encoder was registered for the given content type, nil is returned. +// This call is not thread-safe: body encoders should not be created/destroyed by multiple goroutines. +func RegisteredBodyEncoder(contentType string) BodyEncoder { + return bodyEncoders[contentType] +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request.go new file mode 100644 index 00000000..f4debcda --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request.go @@ -0,0 +1,398 @@ +package openapi3filter + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "sort" + + "github.com/getkin/kin-openapi/openapi3" +) + +// ErrAuthenticationServiceMissing is returned when no authentication service +// is defined for the request validator +var ErrAuthenticationServiceMissing = errors.New("missing AuthenticationFunc") + +// ErrInvalidRequired is returned when a required value of a parameter or request body is not defined. +var ErrInvalidRequired = errors.New("value is required but missing") + +// ErrInvalidEmptyValue is returned when a value of a parameter or request body is empty while it's not allowed. +var ErrInvalidEmptyValue = errors.New("empty value is not allowed") + +// ValidateRequest is used to validate the given input according to previous +// loaded OpenAPIv3 spec. If the input does not match the OpenAPIv3 spec, a +// non-nil error will be returned. 
+// +// Note: One can tune the behavior of uniqueItems: true verification +// by registering a custom function with openapi3.RegisterArrayUniqueItemsChecker +func ValidateRequest(ctx context.Context, input *RequestValidationInput) (err error) { + var me openapi3.MultiError + + options := input.Options + if options == nil { + options = &Options{} + } + route := input.Route + operation := route.Operation + operationParameters := operation.Parameters + pathItemParameters := route.PathItem.Parameters + + // Security + security := operation.Security + // If there aren't any security requirements for the operation + if security == nil { + // Use the global security requirements. + security = &route.Spec.Security + } + if security != nil { + if err = ValidateSecurityRequirements(ctx, input, *security); err != nil && !options.MultiError { + return + } + if err != nil { + me = append(me, err) + } + } + + // For each parameter of the PathItem + for _, parameterRef := range pathItemParameters { + parameter := parameterRef.Value + if operationParameters != nil { + if override := operationParameters.GetByInAndName(parameter.In, parameter.Name); override != nil { + continue + } + } + + if err = ValidateParameter(ctx, input, parameter); err != nil && !options.MultiError { + return + } + if err != nil { + me = append(me, err) + } + } + + // For each parameter of the Operation + for _, parameter := range operationParameters { + if err = ValidateParameter(ctx, input, parameter.Value); err != nil && !options.MultiError { + return + } + if err != nil { + me = append(me, err) + } + } + + // RequestBody + requestBody := operation.RequestBody + if requestBody != nil && !options.ExcludeRequestBody { + if err = ValidateRequestBody(ctx, input, requestBody.Value); err != nil && !options.MultiError { + return + } + if err != nil { + me = append(me, err) + } + } + + if len(me) > 0 { + return me + } + return +} + +// ValidateParameter validates a parameter's value by JSON schema. +// The function returns RequestError with a ParseError cause when unable to parse a value. +// The function returns RequestError with ErrInvalidRequired cause when a value of a required parameter is not defined. +// The function returns RequestError with ErrInvalidEmptyValue cause when a value of a required parameter is not defined. +// The function returns RequestError with a openapi3.SchemaError cause when a value is invalid by JSON schema. +func ValidateParameter(ctx context.Context, input *RequestValidationInput, parameter *openapi3.Parameter) error { + if parameter.Schema == nil && parameter.Content == nil { + // We have no schema for the parameter. Assume that everything passes + // a schema-less check, but this could also be an error. The OpenAPI + // validation allows this to happen. + return nil + } + + options := input.Options + if options == nil { + options = &Options{} + } + + var value interface{} + var err error + var found bool + var schema *openapi3.Schema + + // Validation will ensure that we either have content or schema. 
+ if parameter.Content != nil { + if value, schema, found, err = decodeContentParameter(parameter, input); err != nil { + return &RequestError{Input: input, Parameter: parameter, Err: err} + } + } else { + if value, found, err = decodeStyledParameter(parameter, input); err != nil { + return &RequestError{Input: input, Parameter: parameter, Err: err} + } + schema = parameter.Schema.Value + } + + // Set default value if needed + if !options.SkipSettingDefaults && value == nil && schema != nil && schema.Default != nil { + value = schema.Default + req := input.Request + switch parameter.In { + case openapi3.ParameterInPath: + // Path parameters are required. + // Next check `parameter.Required && !found` will catch this. + case openapi3.ParameterInQuery: + q := req.URL.Query() + q.Add(parameter.Name, fmt.Sprintf("%v", value)) + req.URL.RawQuery = q.Encode() + case openapi3.ParameterInHeader: + req.Header.Add(parameter.Name, fmt.Sprintf("%v", value)) + case openapi3.ParameterInCookie: + req.AddCookie(&http.Cookie{ + Name: parameter.Name, + Value: fmt.Sprintf("%v", value), + }) + } + } + + // Validate a parameter's value and presence. + if parameter.Required && !found { + return &RequestError{Input: input, Parameter: parameter, Reason: ErrInvalidRequired.Error(), Err: ErrInvalidRequired} + } + + if isNilValue(value) { + if !parameter.AllowEmptyValue && found { + return &RequestError{Input: input, Parameter: parameter, Reason: ErrInvalidEmptyValue.Error(), Err: ErrInvalidEmptyValue} + } + return nil + } + if schema == nil { + // A parameter's schema is not defined so skip validation of a parameter's value. + return nil + } + + var opts []openapi3.SchemaValidationOption + if options.MultiError { + opts = make([]openapi3.SchemaValidationOption, 0, 1) + opts = append(opts, openapi3.MultiErrors()) + } + if options.customSchemaErrorFunc != nil { + opts = append(opts, openapi3.SetSchemaErrorMessageCustomizer(options.customSchemaErrorFunc)) + } + if err = schema.VisitJSON(value, opts...); err != nil { + return &RequestError{Input: input, Parameter: parameter, Err: err} + } + return nil +} + +const prefixInvalidCT = "header Content-Type has unexpected value" + +// ValidateRequestBody validates data of a request's body. +// +// The function returns RequestError with ErrInvalidRequired cause when a value is required but not defined. +// The function returns RequestError with a openapi3.SchemaError cause when a value is invalid by JSON schema. 
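// Illustrative sketch (not part of the vendored file): the usual flow that reaches
// ValidateRequestBody below goes through a router and ValidateRequest. The spec path
// "openapi.yaml" is an assumption for this example, and gorillamux refers to the
// github.com/getkin/kin-openapi/routers/gorillamux package vendored later in this patch.
func exampleValidateIncomingRequest(ctx context.Context, req *http.Request) error {
	loader := openapi3.NewLoader()
	doc, err := loader.LoadFromFile("openapi.yaml")
	if err != nil {
		return err
	}
	if err := doc.Validate(loader.Context); err != nil {
		return err
	}
	router, err := gorillamux.NewRouter(doc)
	if err != nil {
		return err
	}
	route, pathParams, err := router.FindRoute(req)
	if err != nil {
		return err
	}
	// Parameters, security requirements and the request body are all checked here.
	return ValidateRequest(ctx, &RequestValidationInput{
		Request:    req,
		PathParams: pathParams,
		Route:      route,
	})
}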
+func ValidateRequestBody(ctx context.Context, input *RequestValidationInput, requestBody *openapi3.RequestBody) error { + var ( + req = input.Request + data []byte + ) + + options := input.Options + if options == nil { + options = &Options{} + } + + if req.Body != http.NoBody && req.Body != nil { + defer req.Body.Close() + var err error + if data, err = ioutil.ReadAll(req.Body); err != nil { + return &RequestError{ + Input: input, + RequestBody: requestBody, + Reason: "reading failed", + Err: err, + } + } + // Put the data back into the input + req.Body = nil + if req.GetBody != nil { + if req.Body, err = req.GetBody(); err != nil { + req.Body = nil + } + } + if req.Body == nil { + req.ContentLength = int64(len(data)) + req.GetBody = func() (io.ReadCloser, error) { + return io.NopCloser(bytes.NewReader(data)), nil + } + req.Body, _ = req.GetBody() // no error return + } + } + + if len(data) == 0 { + if requestBody.Required { + return &RequestError{Input: input, RequestBody: requestBody, Err: ErrInvalidRequired} + } + return nil + } + + content := requestBody.Content + if len(content) == 0 { + // A request's body does not have declared content, so skip validation. + return nil + } + + inputMIME := req.Header.Get(headerCT) + contentType := requestBody.Content.Get(inputMIME) + if contentType == nil { + return &RequestError{ + Input: input, + RequestBody: requestBody, + Reason: fmt.Sprintf("%s %q", prefixInvalidCT, inputMIME), + } + } + + if contentType.Schema == nil { + // A JSON schema that describes the received data is not declared, so skip validation. + return nil + } + + encFn := func(name string) *openapi3.Encoding { return contentType.Encoding[name] } + mediaType, value, err := decodeBody(bytes.NewReader(data), req.Header, contentType.Schema, encFn) + if err != nil { + return &RequestError{ + Input: input, + RequestBody: requestBody, + Reason: "failed to decode request body", + Err: err, + } + } + + defaultsSet := false + opts := make([]openapi3.SchemaValidationOption, 0, 4) // 4 potential opts here + opts = append(opts, openapi3.VisitAsRequest()) + if !options.SkipSettingDefaults { + opts = append(opts, openapi3.DefaultsSet(func() { defaultsSet = true })) + } + if options.MultiError { + opts = append(opts, openapi3.MultiErrors()) + } + if options.customSchemaErrorFunc != nil { + opts = append(opts, openapi3.SetSchemaErrorMessageCustomizer(options.customSchemaErrorFunc)) + } + if options.ExcludeReadOnlyValidations { + opts = append(opts, openapi3.DisableReadOnlyValidation()) + } + + // Validate JSON with the schema + if err := contentType.Schema.Value.VisitJSON(value, opts...); err != nil { + schemaId := getSchemaIdentifier(contentType.Schema) + schemaId = prependSpaceIfNeeded(schemaId) + return &RequestError{ + Input: input, + RequestBody: requestBody, + Reason: fmt.Sprintf("doesn't match schema%s", schemaId), + Err: err, + } + } + + if defaultsSet { + var err error + if data, err = encodeBody(value, mediaType); err != nil { + return &RequestError{ + Input: input, + RequestBody: requestBody, + Reason: "rewriting failed", + Err: err, + } + } + // Put the data back into the input + if req.Body != nil { + req.Body.Close() + } + req.ContentLength = int64(len(data)) + req.GetBody = func() (io.ReadCloser, error) { + return io.NopCloser(bytes.NewReader(data)), nil + } + req.Body, _ = req.GetBody() // no error return + } + + return nil +} + +// ValidateSecurityRequirements goes through multiple OpenAPI 3 security +// requirements in order and returns nil on the first valid requirement. 
+// If no requirement is met, errors are returned in order. +func ValidateSecurityRequirements(ctx context.Context, input *RequestValidationInput, srs openapi3.SecurityRequirements) error { + if len(srs) == 0 { + return nil + } + var errs []error + for _, sr := range srs { + if err := validateSecurityRequirement(ctx, input, sr); err != nil { + if len(errs) == 0 { + errs = make([]error, 0, len(srs)) + } + errs = append(errs, err) + continue + } + return nil + } + return &SecurityRequirementsError{ + SecurityRequirements: srs, + Errors: errs, + } +} + +// validateSecurityRequirement validates a single OpenAPI 3 security requirement +func validateSecurityRequirement(ctx context.Context, input *RequestValidationInput, securityRequirement openapi3.SecurityRequirement) error { + names := make([]string, 0, len(securityRequirement)) + for name := range securityRequirement { + names = append(names, name) + } + sort.Strings(names) + + // Get authentication function + options := input.Options + if options == nil { + options = &Options{} + } + f := options.AuthenticationFunc + if f == nil { + return ErrAuthenticationServiceMissing + } + + var securitySchemes openapi3.SecuritySchemes + if components := input.Route.Spec.Components; components != nil { + securitySchemes = components.SecuritySchemes + } + + // For each scheme for the requirement + for _, name := range names { + var securityScheme *openapi3.SecurityScheme + if securitySchemes != nil { + if ref := securitySchemes[name]; ref != nil { + securityScheme = ref.Value + } + } + if securityScheme == nil { + return &RequestError{ + Input: input, + Err: fmt.Errorf("security scheme %q is not declared", name), + } + } + scopes := securityRequirement[name] + if err := f(ctx, &AuthenticationInput{ + RequestValidationInput: input, + SecuritySchemeName: name, + SecurityScheme: securityScheme, + Scopes: scopes, + }); err != nil { + return err + } + } + return nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request_input.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request_input.go new file mode 100644 index 00000000..91dd102b --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_request_input.go @@ -0,0 +1,38 @@ +package openapi3filter + +import ( + "net/http" + "net/url" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/routers" +) + +// A ContentParameterDecoder takes a parameter definition from the OpenAPI spec, +// and the value which we received for it. It is expected to return the +// value unmarshaled into an interface which can be traversed for +// validation, it should also return the schema to be used for validating the +// object, since there can be more than one in the content spec. +// +// If a query parameter appears multiple times, values[] will have more +// than one value, but for all other parameter types it should have just +// one. 
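// Illustrative sketch (not part of the vendored file): a minimal ContentParameterDecoder
// that unmarshals a JSON-encoded parameter value against the schema of its
// "application/json" media type. The helper name is an assumption for this example,
// and it presumes "encoding/json" and "fmt" are imported.
func exampleJSONParamDecoder(param *openapi3.Parameter, values []string) (interface{}, *openapi3.Schema, error) {
	mt := param.Content.Get("application/json")
	if mt == nil || mt.Schema == nil || len(values) == 0 {
		return nil, nil, fmt.Errorf("parameter %q: no JSON content or no value", param.Name)
	}
	var decoded interface{}
	if err := json.Unmarshal([]byte(values[0]), &decoded); err != nil {
		return nil, nil, err
	}
	return decoded, mt.Schema.Value, nil
}

// A decoder like this would be supplied via the ParamDecoder field of RequestValidationInput below.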
+type ContentParameterDecoder func(param *openapi3.Parameter, values []string) (interface{}, *openapi3.Schema, error) + +type RequestValidationInput struct { + Request *http.Request + PathParams map[string]string + QueryParams url.Values + Route *routers.Route + Options *Options + ParamDecoder ContentParameterDecoder +} + +func (input *RequestValidationInput) GetQueryParams() url.Values { + q := input.QueryParams + if q == nil { + q = input.Request.URL.Query() + input.QueryParams = q + } + return q +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response.go new file mode 100644 index 00000000..dca13380 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response.go @@ -0,0 +1,224 @@ +// Package openapi3filter validates that requests and inputs request an OpenAPI 3 specification file. +package openapi3filter + +import ( + "bytes" + "context" + "fmt" + "io/ioutil" + "net/http" + "sort" + "strings" + + "github.com/getkin/kin-openapi/openapi3" +) + +// ValidateResponse is used to validate the given input according to previous +// loaded OpenAPIv3 spec. If the input does not match the OpenAPIv3 spec, a +// non-nil error will be returned. +// +// Note: One can tune the behavior of uniqueItems: true verification +// by registering a custom function with openapi3.RegisterArrayUniqueItemsChecker +func ValidateResponse(ctx context.Context, input *ResponseValidationInput) error { + req := input.RequestValidationInput.Request + switch req.Method { + case "HEAD": + return nil + } + status := input.Status + + // These status codes will never be validated. + // TODO: The list is probably missing some. + switch status { + case http.StatusNotModified, + http.StatusPermanentRedirect, + http.StatusTemporaryRedirect, + http.StatusMovedPermanently: + return nil + } + route := input.RequestValidationInput.Route + options := input.Options + if options == nil { + options = &Options{} + } + + // Find input for the current status + responses := route.Operation.Responses + if len(responses) == 0 { + return nil + } + responseRef := responses.Get(status) // Response + if responseRef == nil { + responseRef = responses.Default() // Default input + } + if responseRef == nil { + // By default, status that is not documented is allowed. 
+ if !options.IncludeResponseStatus { + return nil + } + return &ResponseError{Input: input, Reason: "status is not supported"} + } + response := responseRef.Value + if response == nil { + return &ResponseError{Input: input, Reason: "response has not been resolved"} + } + + opts := make([]openapi3.SchemaValidationOption, 0, 3) // 3 potential options here + if options.MultiError { + opts = append(opts, openapi3.MultiErrors()) + } + if options.customSchemaErrorFunc != nil { + opts = append(opts, openapi3.SetSchemaErrorMessageCustomizer(options.customSchemaErrorFunc)) + } + if options.ExcludeWriteOnlyValidations { + opts = append(opts, openapi3.DisableWriteOnlyValidation()) + } + + headers := make([]string, 0, len(response.Headers)) + for k := range response.Headers { + if k != headerCT { + headers = append(headers, k) + } + } + sort.Strings(headers) + for _, headerName := range headers { + headerRef := response.Headers[headerName] + if err := validateResponseHeader(headerName, headerRef, input, opts); err != nil { + return err + } + } + + if options.ExcludeResponseBody { + // A user turned off validation of a response's body. + return nil + } + + content := response.Content + if len(content) == 0 || options.ExcludeResponseBody { + // An operation does not contains a validation schema for responses with this status code. + return nil + } + + inputMIME := input.Header.Get(headerCT) + contentType := content.Get(inputMIME) + if contentType == nil { + return &ResponseError{ + Input: input, + Reason: fmt.Sprintf("response header Content-Type has unexpected value: %q", inputMIME), + } + } + + if contentType.Schema == nil { + // An operation does not contains a validation schema for responses with this status code. + return nil + } + + // Read response's body. + body := input.Body + + // Response would contain partial or empty input body + // after we begin reading. + // Ensure that this doesn't happen. + input.Body = nil + + // Ensure we close the reader + defer body.Close() + + // Read all + data, err := ioutil.ReadAll(body) + if err != nil { + return &ResponseError{ + Input: input, + Reason: "failed to read response body", + Err: err, + } + } + + // Put the data back into the response. + input.SetBodyBytes(data) + + encFn := func(name string) *openapi3.Encoding { return contentType.Encoding[name] } + _, value, err := decodeBody(bytes.NewBuffer(data), input.Header, contentType.Schema, encFn) + if err != nil { + return &ResponseError{ + Input: input, + Reason: "failed to decode response body", + Err: err, + } + } + + // Validate data with the schema. 
+ if err := contentType.Schema.Value.VisitJSON(value, append(opts, openapi3.VisitAsResponse())...); err != nil { + schemaId := getSchemaIdentifier(contentType.Schema) + schemaId = prependSpaceIfNeeded(schemaId) + return &ResponseError{ + Input: input, + Reason: fmt.Sprintf("response body doesn't match schema%s", schemaId), + Err: err, + } + } + return nil +} + +func validateResponseHeader(headerName string, headerRef *openapi3.HeaderRef, input *ResponseValidationInput, opts []openapi3.SchemaValidationOption) error { + var err error + var decodedValue interface{} + var found bool + var sm *openapi3.SerializationMethod + dec := &headerParamDecoder{header: input.Header} + + if sm, err = headerRef.Value.SerializationMethod(); err != nil { + return &ResponseError{ + Input: input, + Reason: fmt.Sprintf("unable to get header %q serialization method", headerName), + Err: err, + } + } + + if decodedValue, found, err = decodeValue(dec, headerName, sm, headerRef.Value.Schema, headerRef.Value.Required); err != nil { + return &ResponseError{ + Input: input, + Reason: fmt.Sprintf("unable to decode header %q value", headerName), + Err: err, + } + } + + if found { + if err = headerRef.Value.Schema.Value.VisitJSON(decodedValue, opts...); err != nil { + return &ResponseError{ + Input: input, + Reason: fmt.Sprintf("response header %q doesn't match schema", headerName), + Err: err, + } + } + } else if headerRef.Value.Required { + return &ResponseError{ + Input: input, + Reason: fmt.Sprintf("response header %q missing", headerName), + } + } + return nil +} + +// getSchemaIdentifier gets something by which a schema could be identified. +// A schema by itself doesn't have a true identity field. This function makes +// a best effort to get a value that can fill that void. 
+func getSchemaIdentifier(schema *openapi3.SchemaRef) string { + var id string + + if schema != nil { + id = strings.TrimSpace(schema.Ref) + } + if id == "" && schema.Value != nil { + id = strings.TrimSpace(schema.Value.Title) + } + + return id +} + +func prependSpaceIfNeeded(value string) string { + if len(value) > 0 { + value = " " + value + } + return value +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response_input.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response_input.go new file mode 100644 index 00000000..edf38730 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validate_response_input.go @@ -0,0 +1,42 @@ +package openapi3filter + +import ( + "bytes" + "io" + "io/ioutil" + "net/http" +) + +type ResponseValidationInput struct { + RequestValidationInput *RequestValidationInput + Status int + Header http.Header + Body io.ReadCloser + Options *Options +} + +func (input *ResponseValidationInput) SetBodyBytes(value []byte) *ResponseValidationInput { + input.Body = ioutil.NopCloser(bytes.NewReader(value)) + return input +} + +var JSONPrefixes = []string{ + ")]}',\n", +} + +// TrimJSONPrefix trims one of the possible prefixes +func TrimJSONPrefix(data []byte) []byte { +search: + for _, prefix := range JSONPrefixes { + if len(data) < len(prefix) { + continue + } + for i, b := range data[:len(prefix)] { + if b != prefix[i] { + continue search + } + } + return data[len(prefix):] + } + return data +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error.go new file mode 100644 index 00000000..7e685cde --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error.go @@ -0,0 +1,85 @@ +package openapi3filter + +import ( + "bytes" + "strconv" +) + +// ValidationError struct provides granular error information +// useful for communicating issues back to end user and developer. +// Based on https://jsonapi.org/format/#error-objects +type ValidationError struct { + // A unique identifier for this particular occurrence of the problem. + Id string `json:"id,omitempty" yaml:"id,omitempty"` + // The HTTP status code applicable to this problem. + Status int `json:"status,omitempty" yaml:"status,omitempty"` + // An application-specific error code, expressed as a string value. + Code string `json:"code,omitempty" yaml:"code,omitempty"` + // A short, human-readable summary of the problem. It **SHOULD NOT** change from occurrence to occurrence of the problem, except for purposes of localization. + Title string `json:"title,omitempty" yaml:"title,omitempty"` + // A human-readable explanation specific to this occurrence of the problem. + Detail string `json:"detail,omitempty" yaml:"detail,omitempty"` + // An object containing references to the source of the error + Source *ValidationErrorSource `json:"source,omitempty" yaml:"source,omitempty"` +} + +// ValidationErrorSource struct +type ValidationErrorSource struct { + // A JSON Pointer [RFC6901] to the associated entity in the request document [e.g. \"/data\" for a primary data object, or \"/data/attributes/title\" for a specific attribute]. + Pointer string `json:"pointer,omitempty" yaml:"pointer,omitempty"` + // A string indicating which query parameter caused the error. 
+ Parameter string `json:"parameter,omitempty" yaml:"parameter,omitempty"` +} + +var _ error = &ValidationError{} + +// Error implements the error interface. +func (e *ValidationError) Error() string { + b := new(bytes.Buffer) + b.WriteString("[") + if e.Status != 0 { + b.WriteString(strconv.Itoa(e.Status)) + } + b.WriteString("]") + b.WriteString("[") + if e.Code != "" { + b.WriteString(e.Code) + } + b.WriteString("]") + b.WriteString("[") + if e.Id != "" { + b.WriteString(e.Id) + } + b.WriteString("]") + b.WriteString(" ") + if e.Title != "" { + b.WriteString(e.Title) + b.WriteString(" ") + } + if e.Detail != "" { + b.WriteString("| ") + b.WriteString(e.Detail) + b.WriteString(" ") + } + if e.Source != nil { + b.WriteString("[source ") + if e.Source.Parameter != "" { + b.WriteString("parameter=") + b.WriteString(e.Source.Parameter) + } else if e.Source.Pointer != "" { + b.WriteString("pointer=") + b.WriteString(e.Source.Pointer) + } + b.WriteString("]") + } + + if b.Len() == 0 { + return "no error" + } + return b.String() +} + +// StatusCode implements the StatusCoder interface for DefaultErrorEncoder +func (e *ValidationError) StatusCode() int { + return e.Status +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error_encoder.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error_encoder.go new file mode 100644 index 00000000..c03fd139 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_error_encoder.go @@ -0,0 +1,186 @@ +package openapi3filter + +import ( + "context" + "fmt" + "net/http" + "strings" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/routers" +) + +// ValidationErrorEncoder wraps a base ErrorEncoder to handle ValidationErrors +type ValidationErrorEncoder struct { + Encoder ErrorEncoder +} + +// Encode implements the ErrorEncoder interface for encoding ValidationErrors +func (enc *ValidationErrorEncoder) Encode(ctx context.Context, err error, w http.ResponseWriter) { + enc.Encoder(ctx, ConvertErrors(err), w) +} + +// ConvertErrors converts all errors to the appropriate error format. 
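// Illustrative sketch (not part of the vendored file): wrapping DefaultErrorEncoder
// (defined in validation_kit.go later in this patch) so that validation failures are
// reported to clients using the ValidationError shape above instead of plain text.
func exampleEncodeValidationFailure(ctx context.Context, err error, w http.ResponseWriter) {
	enc := &ValidationErrorEncoder{Encoder: DefaultErrorEncoder}
	// ConvertErrors (below) maps RequestError/RouteError values to *ValidationError first.
	enc.Encode(ctx, err, w)
}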
+func ConvertErrors(err error) error { + if e, ok := err.(*routers.RouteError); ok { + return convertRouteError(e) + } + + e, ok := err.(*RequestError) + if !ok { + return err + } + + var cErr *ValidationError + if e.Err == nil { + cErr = convertBasicRequestError(e) + } else if e.Err == ErrInvalidRequired { + cErr = convertErrInvalidRequired(e) + } else if e.Err == ErrInvalidEmptyValue { + cErr = convertErrInvalidEmptyValue(e) + } else if innerErr, ok := e.Err.(*ParseError); ok { + cErr = convertParseError(e, innerErr) + } else if innerErr, ok := e.Err.(*openapi3.SchemaError); ok { + cErr = convertSchemaError(e, innerErr) + } + + if cErr != nil { + return cErr + } + return err +} + +func convertRouteError(e *routers.RouteError) *ValidationError { + status := http.StatusNotFound + if e.Error() == routers.ErrMethodNotAllowed.Error() { + status = http.StatusMethodNotAllowed + } + return &ValidationError{Status: status, Title: e.Error()} +} + +func convertBasicRequestError(e *RequestError) *ValidationError { + if strings.HasPrefix(e.Reason, prefixInvalidCT) { + if strings.HasSuffix(e.Reason, `""`) { + return &ValidationError{ + Status: http.StatusUnsupportedMediaType, + Title: "header Content-Type is required", + } + } + return &ValidationError{ + Status: http.StatusUnsupportedMediaType, + Title: prefixUnsupportedCT + strings.TrimPrefix(e.Reason, prefixInvalidCT), + } + } + return &ValidationError{ + Status: http.StatusBadRequest, + Title: e.Error(), + } +} + +func convertErrInvalidRequired(e *RequestError) *ValidationError { + if e.Err == ErrInvalidRequired && e.Parameter != nil { + return &ValidationError{ + Status: http.StatusBadRequest, + Title: fmt.Sprintf("parameter %q in %s is required", e.Parameter.Name, e.Parameter.In), + } + } + return &ValidationError{ + Status: http.StatusBadRequest, + Title: e.Error(), + } +} + +func convertErrInvalidEmptyValue(e *RequestError) *ValidationError { + if e.Err == ErrInvalidEmptyValue && e.Parameter != nil { + return &ValidationError{ + Status: http.StatusBadRequest, + Title: fmt.Sprintf("parameter %q in %s is not allowed to be empty", e.Parameter.Name, e.Parameter.In), + } + } + return &ValidationError{ + Status: http.StatusBadRequest, + Title: e.Error(), + } +} + +func convertParseError(e *RequestError, innerErr *ParseError) *ValidationError { + // We treat path params of the wrong type like a 404 instead of a 400 + if innerErr.Kind == KindInvalidFormat && e.Parameter != nil && e.Parameter.In == "path" { + return &ValidationError{ + Status: http.StatusNotFound, + Title: fmt.Sprintf("resource not found with %q value: %v", e.Parameter.Name, innerErr.Value), + } + } else if strings.HasPrefix(innerErr.Reason, prefixUnsupportedCT) { + return &ValidationError{ + Status: http.StatusUnsupportedMediaType, + Title: innerErr.Reason, + } + } else if innerErr.RootCause() != nil { + if rootErr, ok := innerErr.Cause.(*ParseError); ok && + rootErr.Kind == KindInvalidFormat && e.Parameter.In == "query" { + return &ValidationError{ + Status: http.StatusBadRequest, + Title: fmt.Sprintf("parameter %q in %s is invalid: %v is %s", + e.Parameter.Name, e.Parameter.In, rootErr.Value, rootErr.Reason), + } + } + return &ValidationError{ + Status: http.StatusBadRequest, + Title: innerErr.Reason, + } + } + return nil +} + +func convertSchemaError(e *RequestError, innerErr *openapi3.SchemaError) *ValidationError { + cErr := &ValidationError{Title: innerErr.Reason} + + // Handle "Origin" error + if originErr, ok := innerErr.Origin.(*openapi3.SchemaError); ok { + cErr = 
convertSchemaError(e, originErr) + } + + // Add http status code + if e.Parameter != nil { + cErr.Status = http.StatusBadRequest + } else if e.RequestBody != nil { + cErr.Status = http.StatusUnprocessableEntity + } + + // Add error source + if e.Parameter != nil { + // We have a JSONPointer in the query param too so need to + // make sure 'Parameter' check takes priority over 'Pointer' + cErr.Source = &ValidationErrorSource{Parameter: e.Parameter.Name} + } else if ptr := innerErr.JSONPointer(); ptr != nil { + cErr.Source = &ValidationErrorSource{Pointer: toJSONPointer(ptr)} + } + + // Add details on allowed values for enums + if innerErr.SchemaField == "enum" { + enums := make([]string, 0, len(innerErr.Schema.Enum)) + for _, enum := range innerErr.Schema.Enum { + enums = append(enums, fmt.Sprintf("%v", enum)) + } + cErr.Detail = fmt.Sprintf("value %v at %s must be one of: %s", + innerErr.Value, + toJSONPointer(innerErr.JSONPointer()), + strings.Join(enums, ", ")) + value := fmt.Sprintf("%v", innerErr.Value) + if e.Parameter != nil && + (e.Parameter.Explode == nil || *e.Parameter.Explode) && + (e.Parameter.Style == "" || e.Parameter.Style == "form") && + strings.Contains(value, ",") { + parts := strings.Split(value, ",") + cErr.Detail = fmt.Sprintf("%s; perhaps you intended '?%s=%s'", + cErr.Detail, + e.Parameter.Name, + strings.Join(parts, "&"+e.Parameter.Name+"=")) + } + } + return cErr +} + +func toJSONPointer(reversePath []string) string { + return "/" + strings.Join(reversePath, "/") +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_handler.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_handler.go new file mode 100644 index 00000000..d4bb1efa --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_handler.go @@ -0,0 +1,107 @@ +package openapi3filter + +import ( + "context" + "net/http" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/routers" + legacyrouter "github.com/getkin/kin-openapi/routers/legacy" +) + +// AuthenticationFunc allows for custom security requirement validation. 
+// A non-nil error fails authentication according to https://spec.openapis.org/oas/v3.1.0#security-requirement-object +// See ValidateSecurityRequirements +type AuthenticationFunc func(context.Context, *AuthenticationInput) error + +// NoopAuthenticationFunc is an AuthenticationFunc +func NoopAuthenticationFunc(context.Context, *AuthenticationInput) error { return nil } + +var _ AuthenticationFunc = NoopAuthenticationFunc + +type ValidationHandler struct { + Handler http.Handler + AuthenticationFunc AuthenticationFunc + File string + ErrorEncoder ErrorEncoder + router routers.Router +} + +func (h *ValidationHandler) Load() error { + loader := openapi3.NewLoader() + doc, err := loader.LoadFromFile(h.File) + if err != nil { + return err + } + if err := doc.Validate(loader.Context); err != nil { + return err + } + if h.router, err = legacyrouter.NewRouter(doc); err != nil { + return err + } + + // set defaults + if h.Handler == nil { + h.Handler = http.DefaultServeMux + } + if h.AuthenticationFunc == nil { + h.AuthenticationFunc = NoopAuthenticationFunc + } + if h.ErrorEncoder == nil { + h.ErrorEncoder = DefaultErrorEncoder + } + + return nil +} + +func (h *ValidationHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if handled := h.before(w, r); handled { + return + } + // TODO: validateResponse + h.Handler.ServeHTTP(w, r) +} + +// Middleware implements gorilla/mux MiddlewareFunc +func (h *ValidationHandler) Middleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if handled := h.before(w, r); handled { + return + } + // TODO: validateResponse + next.ServeHTTP(w, r) + }) +} + +func (h *ValidationHandler) before(w http.ResponseWriter, r *http.Request) (handled bool) { + if err := h.validateRequest(r); err != nil { + h.ErrorEncoder(r.Context(), err, w) + return true + } + return false +} + +func (h *ValidationHandler) validateRequest(r *http.Request) error { + // Find route + route, pathParams, err := h.router.FindRoute(r) + if err != nil { + return err + } + + options := &Options{ + AuthenticationFunc: h.AuthenticationFunc, + } + + // Validate request + requestValidationInput := &RequestValidationInput{ + Request: r, + PathParams: pathParams, + Route: route, + Options: options, + } + if err = ValidateRequest(r.Context(), requestValidationInput); err != nil { + return err + } + + return nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_kit.go b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_kit.go new file mode 100644 index 00000000..9e11e4fc --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/openapi3filter/validation_kit.go @@ -0,0 +1,85 @@ +package openapi3filter + +import ( + "context" + "encoding/json" + "net/http" +) + +/////////////////////////////////////////////////////////////////////////////////// +// We didn't want to tie kin-openapi too tightly with go-kit. +// This file contains the ErrorEncoder and DefaultErrorEncoder function +// borrowed from this project. 
+// +// The MIT License (MIT) +// +// Copyright (c) 2015 Peter Bourgon +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. +/////////////////////////////////////////////////////////////////////////////////// + +// ErrorEncoder is responsible for encoding an error to the ResponseWriter. +// Users are encouraged to use custom ErrorEncoders to encode HTTP errors to +// their clients, and will likely want to pass and check for their own error +// types. See the example shipping/handling service. +type ErrorEncoder func(ctx context.Context, err error, w http.ResponseWriter) + +// StatusCoder is checked by DefaultErrorEncoder. If an error value implements +// StatusCoder, the StatusCode will be used when encoding the error. By default, +// StatusInternalServerError (500) is used. +type StatusCoder interface { + StatusCode() int +} + +// Headerer is checked by DefaultErrorEncoder. If an error value implements +// Headerer, the provided headers will be applied to the response writer, after +// the Content-Type is set. +type Headerer interface { + Headers() http.Header +} + +// DefaultErrorEncoder writes the error to the ResponseWriter, by default a +// content type of text/plain, a body of the plain text of the error, and a +// status code of 500. If the error implements Headerer, the provided headers +// will be applied to the response. If the error implements json.Marshaler, and +// the marshaling succeeds, a content type of application/json and the JSON +// encoded form of the error will be used. If the error implements StatusCoder, +// the provided StatusCode will be used instead of 500. 
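// Illustrative sketch (not part of the vendored file): an error type that implements
// StatusCoder and json.Marshaler, so that DefaultErrorEncoder below responds with
// application/json and the chosen status code. The type and its fields are assumptions
// made for this example.
type exampleAPIError struct {
	Status int    `json:"status"`
	Title  string `json:"title"`
}

func (e *exampleAPIError) Error() string   { return e.Title }
func (e *exampleAPIError) StatusCode() int { return e.Status }

func (e *exampleAPIError) MarshalJSON() ([]byte, error) {
	// The local alias drops the MarshalJSON method to avoid infinite recursion.
	type alias exampleAPIError
	return json.Marshal((*alias)(e))
}

// Usage: DefaultErrorEncoder(ctx, &exampleAPIError{Status: 422, Title: "body does not match schema"}, w)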
+func DefaultErrorEncoder(_ context.Context, err error, w http.ResponseWriter) { + contentType, body := "text/plain; charset=utf-8", []byte(err.Error()) + if marshaler, ok := err.(json.Marshaler); ok { + if jsonBody, marshalErr := marshaler.MarshalJSON(); marshalErr == nil { + contentType, body = "application/json; charset=utf-8", jsonBody + } + } + w.Header().Set("Content-Type", contentType) + if headerer, ok := err.(Headerer); ok { + for k, values := range headerer.Headers() { + for _, v := range values { + w.Header().Add(k, v) + } + } + } + code := http.StatusInternalServerError + if sc, ok := err.(StatusCoder); ok { + code = sc.StatusCode() + } + w.WriteHeader(code) + w.Write(body) +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/routers/gorillamux/router.go b/index/server/vendor/github.com/getkin/kin-openapi/routers/gorillamux/router.go new file mode 100644 index 00000000..bbf81cea --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/routers/gorillamux/router.go @@ -0,0 +1,263 @@ +// Package gorillamux implements a router. +// +// It differs from the legacy router: +// * it provides somewhat granular errors: "path not found", "method not allowed". +// * it handles matching routes with extensions (e.g. /books/{id}.json) +// * it handles path patterns with a different syntax (e.g. /params/{x}/{y}/{z:.*}) +package gorillamux + +import ( + "net/http" + "net/url" + "regexp" + "sort" + "strings" + + "github.com/gorilla/mux" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/routers" +) + +var _ routers.Router = &Router{} + +// Router helps link http.Request.s and an OpenAPIv3 spec +type Router struct { + muxes []routeMux + routes []*routers.Route +} + +type varsf func(vars map[string]string) + +type routeMux struct { + muxRoute *mux.Route + varsUpdater varsf +} + +type srv struct { + schemes []string + host, base string + server *openapi3.Server + varsUpdater varsf +} + +var singleVariableMatcher = regexp.MustCompile(`^\{([^{}]+)\}$`) + +// TODO: Handle/HandlerFunc + ServeHTTP (When there is a match, the route variables can be retrieved calling mux.Vars(request)) + +// NewRouter creates a gorilla/mux router. +// Assumes spec is .Validate()d +// Note that a variable for the port number MUST have a default value and only this value will match as the port (see issue #367). +func NewRouter(doc *openapi3.T) (routers.Router, error) { + servers, err := makeServers(doc.Servers) + if err != nil { + return nil, err + } + + muxRouter := mux.NewRouter().UseEncodedPath() + r := &Router{} + for _, path := range doc.Paths.InMatchingOrder() { + pathItem := doc.Paths[path] + if len(pathItem.Servers) > 0 { + if servers, err = makeServers(pathItem.Servers); err != nil { + return nil, err + } + } + + operations := pathItem.Operations() + methods := make([]string, 0, len(operations)) + for method := range operations { + methods = append(methods, method) + } + sort.Strings(methods) + + for _, s := range servers { + muxRoute := muxRouter.Path(s.base + path).Methods(methods...) + if schemes := s.schemes; len(schemes) != 0 { + muxRoute.Schemes(schemes...) 
+ } + if host := s.host; host != "" { + muxRoute.Host(host) + } + if err := muxRoute.GetError(); err != nil { + return nil, err + } + r.muxes = append(r.muxes, routeMux{ + muxRoute: muxRoute, + varsUpdater: s.varsUpdater, + }) + r.routes = append(r.routes, &routers.Route{ + Spec: doc, + Server: s.server, + Path: path, + PathItem: pathItem, + Method: "", + Operation: nil, + }) + } + } + return r, nil +} + +// FindRoute extracts the route and parameters of an http.Request +func (r *Router) FindRoute(req *http.Request) (*routers.Route, map[string]string, error) { + for i, m := range r.muxes { + var match mux.RouteMatch + if m.muxRoute.Match(req, &match) { + if err := match.MatchErr; err != nil { + // What then? + } + vars := match.Vars + if f := m.varsUpdater; f != nil { + f(vars) + } + route := *r.routes[i] + route.Method = req.Method + route.Operation = route.Spec.Paths[route.Path].GetOperation(route.Method) + return &route, vars, nil + } + switch match.MatchErr { + case nil: + case mux.ErrMethodMismatch: + return nil, nil, routers.ErrMethodNotAllowed + default: // What then? + } + } + return nil, nil, routers.ErrPathNotFound +} + +func makeServers(in openapi3.Servers) ([]srv, error) { + servers := make([]srv, 0, len(in)) + for _, server := range in { + serverURL := server.URL + if submatch := singleVariableMatcher.FindStringSubmatch(serverURL); submatch != nil { + sVar := submatch[1] + sVal := server.Variables[sVar].Default + serverURL = strings.ReplaceAll(serverURL, "{"+sVar+"}", sVal) + var varsUpdater varsf + if lhs := strings.TrimSuffix(serverURL, server.Variables[sVar].Default); lhs != "" { + varsUpdater = func(vars map[string]string) { vars[sVar] = lhs } + } + svr, err := newSrv(serverURL, server, varsUpdater) + if err != nil { + return nil, err + } + + servers = append(servers, svr) + continue + } + + // If a variable represents the port "http://domain.tld:{port}/bla" + // then url.Parse() cannot parse "http://domain.tld:`bEncode({port})`/bla" + // and mux is not able to set the {port} variable + // So we just use the default value for this variable. + // See https://github.com/getkin/kin-openapi/issues/367 + var varsUpdater varsf + if lhs := strings.Index(serverURL, ":{"); lhs > 0 { + rest := serverURL[lhs+len(":{"):] + rhs := strings.Index(rest, "}") + portVariable := rest[:rhs] + portValue := server.Variables[portVariable].Default + serverURL = strings.ReplaceAll(serverURL, "{"+portVariable+"}", portValue) + varsUpdater = func(vars map[string]string) { + vars[portVariable] = portValue + } + } + + svr, err := newSrv(serverURL, server, varsUpdater) + if err != nil { + return nil, err + } + servers = append(servers, svr) + } + if len(servers) == 0 { + servers = append(servers, srv{}) + } + + return servers, nil +} + +func newSrv(serverURL string, server *openapi3.Server, varsUpdater varsf) (srv, error) { + var schemes []string + if strings.Contains(serverURL, "://") { + scheme0 := strings.Split(serverURL, "://")[0] + schemes = permutePart(scheme0, server) + serverURL = strings.Replace(serverURL, scheme0+"://", schemes[0]+"://", 1) + } + + u, err := url.Parse(bEncode(serverURL)) + if err != nil { + return srv{}, err + } + path := bDecode(u.EscapedPath()) + if len(path) > 0 && path[len(path)-1] == '/' { + path = path[:len(path)-1] + } + svr := srv{ + host: bDecode(u.Host), //u.Hostname()? 
+ base: path, + schemes: schemes, // scheme: []string{scheme0}, TODO: https://github.com/gorilla/mux/issues/624 + server: server, + varsUpdater: varsUpdater, + } + return svr, nil +} + +// Magic strings that temporarily replace "{}" so net/url.Parse() works +var blURL, brURL = strings.Repeat("-", 50), strings.Repeat("_", 50) + +func bEncode(s string) string { + s = strings.Replace(s, "{", blURL, -1) + s = strings.Replace(s, "}", brURL, -1) + return s +} +func bDecode(s string) string { + s = strings.Replace(s, blURL, "{", -1) + s = strings.Replace(s, brURL, "}", -1) + return s +} + +func permutePart(part0 string, srv *openapi3.Server) []string { + type mapAndSlice struct { + m map[string]struct{} + s []string + } + var2val := make(map[string]mapAndSlice) + max := 0 + for name0, v := range srv.Variables { + name := "{" + name0 + "}" + if !strings.Contains(part0, name) { + continue + } + m := map[string]struct{}{v.Default: {}} + for _, value := range v.Enum { + m[value] = struct{}{} + } + if l := len(m); l > max { + max = l + } + s := make([]string, 0, len(m)) + for value := range m { + s = append(s, value) + } + var2val[name] = mapAndSlice{m: m, s: s} + } + if len(var2val) == 0 { + return []string{part0} + } + + partsMap := make(map[string]struct{}, max*len(var2val)) + for i := 0; i < max; i++ { + part := part0 + for name, mas := range var2val { + part = strings.Replace(part, name, mas.s[i%len(mas.s)], -1) + } + partsMap[part] = struct{}{} + } + parts := make([]string, 0, len(partsMap)) + for part := range partsMap { + parts = append(parts, part) + } + sort.Strings(parts) + return parts +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/pathpattern/node.go b/index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/pathpattern/node.go new file mode 100644 index 00000000..011dda35 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/pathpattern/node.go @@ -0,0 +1,328 @@ +// Package pathpattern implements path matching. +// +// Examples of supported patterns: +// - "/" +// - "/abc"" +// - "/abc/{variable}" (matches until next '/' or end-of-string) +// - "/abc/{variable*}" (matches everything, including "/abc" if "/abc" has noot) +// - "/abc/{ variable | prefix_(.*}_suffix }" (matches regular expressions) +package pathpattern + +import ( + "bytes" + "fmt" + "regexp" + "sort" + "strings" +) + +var DefaultOptions = &Options{ + SupportWildcard: true, +} + +type Options struct { + SupportWildcard bool + SupportRegExp bool +} + +// PathFromHost converts a host pattern to a path pattern. +// +// Examples: +// - PathFromHost("some-subdomain.domain.com", false) -> "com/./domain/./some-subdomain" +// - PathFromHost("some-subdomain.domain.com", true) -> "com/./domain/./subdomain/-/some" +func PathFromHost(host string, specialDashes bool) string { + buf := make([]byte, 0, len(host)) + end := len(host) + + // Go from end to start + for start := end - 1; start >= 0; start-- { + switch host[start] { + case '.': + buf = append(buf, host[start+1:end]...) + buf = append(buf, '/', '.', '/') + end = start + case '-': + if specialDashes { + buf = append(buf, host[start+1:end]...) + buf = append(buf, '/', '-', '/') + end = start + } + } + } + buf = append(buf, host[:end]...) 
+ return string(buf) +} + +type Node struct { + VariableNames []string + Value interface{} + Suffixes SuffixList +} + +func (currentNode *Node) String() string { + buf := bytes.NewBuffer(make([]byte, 0, 255)) + currentNode.toBuffer(buf, "") + return buf.String() +} + +func (currentNode *Node) toBuffer(buf *bytes.Buffer, linePrefix string) { + if value := currentNode.Value; value != nil { + buf.WriteString(linePrefix) + buf.WriteString("VALUE: ") + fmt.Fprint(buf, value) + buf.WriteString("\n") + } + suffixes := currentNode.Suffixes + if len(suffixes) > 0 { + newLinePrefix := linePrefix + " " + for _, suffix := range suffixes { + buf.WriteString(linePrefix) + buf.WriteString("PATTERN: ") + buf.WriteString(suffix.String()) + buf.WriteString("\n") + suffix.Node.toBuffer(buf, newLinePrefix) + } + } +} + +type SuffixKind int + +// Note that order is important! +const ( + // SuffixKindConstant matches a constant string + SuffixKindConstant = SuffixKind(iota) + + // SuffixKindRegExp matches a regular expression + SuffixKindRegExp + + // SuffixKindVariable matches everything until '/' + SuffixKindVariable + + // SuffixKindEverything matches everything (until end-of-string) + SuffixKindEverything +) + +// Suffix describes condition that +type Suffix struct { + Kind SuffixKind + Pattern string + + // compiled regular expression + regExp *regexp.Regexp + + // Next node + Node *Node +} + +func EqualSuffix(a, b Suffix) bool { + return a.Kind == b.Kind && a.Pattern == b.Pattern +} + +func (suffix Suffix) String() string { + switch suffix.Kind { + case SuffixKindConstant: + return suffix.Pattern + case SuffixKindVariable: + return "{_}" + case SuffixKindEverything: + return "{_*}" + default: + return "{_|" + suffix.Pattern + "}" + } +} + +type SuffixList []Suffix + +func (list SuffixList) Less(i, j int) bool { + a, b := list[i], list[j] + ak, bk := a.Kind, b.Kind + if ak < bk { + return true + } else if bk < ak { + return false + } + return a.Pattern > b.Pattern +} + +func (list SuffixList) Len() int { + return len(list) +} + +func (list SuffixList) Swap(i, j int) { + a, b := list[i], list[j] + list[i], list[j] = b, a +} + +func (currentNode *Node) MustAdd(path string, value interface{}, options *Options) { + node, err := currentNode.CreateNode(path, options) + if err != nil { + panic(err) + } + node.Value = value +} + +func (currentNode *Node) Add(path string, value interface{}, options *Options) error { + node, err := currentNode.CreateNode(path, options) + if err != nil { + return err + } + node.Value = value + return nil +} + +func (currentNode *Node) CreateNode(path string, options *Options) (*Node, error) { + if options == nil { + options = DefaultOptions + } + for strings.HasSuffix(path, "/") { + path = path[:len(path)-1] + } + remaining := path + var variableNames []string +loop: + for { + //remaining = strings.TrimPrefix(remaining, "/") + if len(remaining) == 0 { + // This node is the right one + // Check whether another route already leads to this node + currentNode.VariableNames = variableNames + return currentNode, nil + } + + suffix := Suffix{} + var i int + if strings.HasPrefix(remaining, "/") { + remaining = remaining[1:] + suffix.Kind = SuffixKindConstant + suffix.Pattern = "/" + } else { + i = strings.IndexAny(remaining, "/{") + if i < 0 { + i = len(remaining) + } + if i > 0 { + // Constant string pattern + suffix.Kind = SuffixKindConstant + suffix.Pattern = remaining[:i] + remaining = remaining[i:] + } else if remaining[0] == '{' { + // This is probably a variable + suffix.Kind = 
SuffixKindVariable + + // Find variable name + i := strings.IndexByte(remaining, '}') + if i < 0 { + return nil, fmt.Errorf("missing '}' in: %s", path) + } + variableName := strings.TrimSpace(remaining[1:i]) + remaining = remaining[i+1:] + + if options.SupportRegExp { + // See if it has regular expression + i = strings.IndexByte(variableName, '|') + if i >= 0 { + suffix.Kind = SuffixKindRegExp + suffix.Pattern = strings.TrimSpace(variableName[i+1:]) + variableName = strings.TrimSpace(variableName[:i]) + } + } + if suffix.Kind == SuffixKindVariable && options.SupportWildcard { + if strings.HasSuffix(variableName, "*") { + suffix.Kind = SuffixKindEverything + } + } + variableNames = append(variableNames, variableName) + } + } + + // Find existing matcher + for _, existing := range currentNode.Suffixes { + if EqualSuffix(existing, suffix) { + currentNode = existing.Node + continue loop + } + } + + // Compile regular expression + if suffix.Kind == SuffixKindRegExp { + regExp, err := regexp.Compile(suffix.Pattern) + if err != nil { + return nil, fmt.Errorf("invalid regular expression in: %s", path) + } + suffix.regExp = regExp + } + + // Create new node + newNode := &Node{} + suffix.Node = newNode + currentNode.Suffixes = append(currentNode.Suffixes, suffix) + sort.Sort(currentNode.Suffixes) + currentNode = newNode + continue loop + } +} + +func (currentNode *Node) Match(path string) (*Node, []string) { + for strings.HasSuffix(path, "/") { + path = path[:len(path)-1] + } + variableValues := make([]string, 0, 8) + return currentNode.matchRemaining(path, variableValues) +} + +func (currentNode *Node) matchRemaining(remaining string, paramValues []string) (*Node, []string) { + // Check if this node matches + if len(remaining) == 0 && currentNode.Value != nil { + return currentNode, paramValues + } + + // See if any suffix matches + for _, suffix := range currentNode.Suffixes { + var resultNode *Node + var resultValues []string + switch suffix.Kind { + case SuffixKindConstant: + pattern := suffix.Pattern + if strings.HasPrefix(remaining, pattern) { + newRemaining := remaining[len(pattern):] + resultNode, resultValues = suffix.Node.matchRemaining(newRemaining, paramValues) + } else if len(remaining) == 0 && pattern == "/" { + resultNode, resultValues = suffix.Node.matchRemaining(remaining, paramValues) + } + case SuffixKindVariable: + i := strings.IndexByte(remaining, '/') + if i < 0 { + i = len(remaining) + } + newParamValues := append(paramValues, remaining[:i]) + newRemaining := remaining[i:] + resultNode, resultValues = suffix.Node.matchRemaining(newRemaining, newParamValues) + case SuffixKindEverything: + newParamValues := append(paramValues, remaining) + resultNode, resultValues = suffix.Node, newParamValues + case SuffixKindRegExp: + i := strings.IndexByte(remaining, '/') + if i < 0 { + i = len(remaining) + } + paramValue := remaining[:i] + regExp := suffix.regExp + if regExp.MatchString(paramValue) { + matches := regExp.FindStringSubmatch(paramValue) + if len(matches) > 1 { + paramValue = matches[1] + } + newParamValues := append(paramValues, paramValue) + newRemaining := remaining[i:] + resultNode, resultValues = suffix.Node.matchRemaining(newRemaining, newParamValues) + } + } + if resultNode != nil && resultNode.Value != nil { + // This suffix matched + return resultNode, resultValues + } + } + + // No suffix matched + return nil, nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/router.go 
b/index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/router.go new file mode 100644 index 00000000..911422b8 --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/routers/legacy/router.go @@ -0,0 +1,167 @@ +// Package legacy implements a router. +// +// It differs from the gorilla/mux router: +// * it provides granular errors: "path not found", "method not allowed", "variable missing from path" +// * it does not handle matching routes with extensions (e.g. /books/{id}.json) +// * it handles path patterns with a different syntax (e.g. /params/{x}/{y}/{z.*}) +package legacy + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/routers" + "github.com/getkin/kin-openapi/routers/legacy/pathpattern" +) + +// Routers maps a HTTP request to a Router. +type Routers []*Router + +// FindRoute extracts the route and parameters of an http.Request +func (rs Routers) FindRoute(req *http.Request) (routers.Router, *routers.Route, map[string]string, error) { + for _, router := range rs { + // Skip routers that have DO NOT have servers + if len(router.doc.Servers) == 0 { + continue + } + route, pathParams, err := router.FindRoute(req) + if err == nil { + return router, route, pathParams, nil + } + } + for _, router := range rs { + // Skip routers that DO have servers + if len(router.doc.Servers) > 0 { + continue + } + route, pathParams, err := router.FindRoute(req) + if err == nil { + return router, route, pathParams, nil + } + } + return nil, nil, nil, &routers.RouteError{ + Reason: "none of the routers match", + } +} + +// Router maps a HTTP request to an OpenAPI operation. +type Router struct { + doc *openapi3.T + pathNode *pathpattern.Node +} + +// NewRouter creates a new router. +// +// If the given OpenAPIv3 document has servers, router will use them. +// All operations of the document will be added to the router. +func NewRouter(doc *openapi3.T, opts ...openapi3.ValidationOption) (routers.Router, error) { + if err := doc.Validate(context.Background(), opts...); err != nil { + return nil, fmt.Errorf("validating OpenAPI failed: %w", err) + } + router := &Router{doc: doc} + root := router.node() + for path, pathItem := range doc.Paths { + for method, operation := range pathItem.Operations() { + method = strings.ToUpper(method) + if err := root.Add(method+" "+path, &routers.Route{ + Spec: doc, + Path: path, + PathItem: pathItem, + Method: method, + Operation: operation, + }, nil); err != nil { + return nil, err + } + } + } + return router, nil +} + +// AddRoute adds a route in the router. 
+func (router *Router) AddRoute(route *routers.Route) error { + method := route.Method + if method == "" { + return errors.New("route is missing method") + } + method = strings.ToUpper(method) + path := route.Path + if path == "" { + return errors.New("route is missing path") + } + return router.node().Add(method+" "+path, router, nil) +} + +func (router *Router) node() *pathpattern.Node { + root := router.pathNode + if root == nil { + root = &pathpattern.Node{} + router.pathNode = root + } + return root +} + +// FindRoute extracts the route and parameters of an http.Request +func (router *Router) FindRoute(req *http.Request) (*routers.Route, map[string]string, error) { + method, url := req.Method, req.URL + doc := router.doc + + // Get server + servers := doc.Servers + var server *openapi3.Server + var remainingPath string + var pathParams map[string]string + if len(servers) == 0 { + remainingPath = url.Path + } else { + var paramValues []string + server, paramValues, remainingPath = servers.MatchURL(url) + if server == nil { + return nil, nil, &routers.RouteError{ + Reason: routers.ErrPathNotFound.Error(), + } + } + pathParams = make(map[string]string) + paramNames, err := server.ParameterNames() + if err != nil { + return nil, nil, err + } + for i, value := range paramValues { + name := paramNames[i] + pathParams[name] = value + } + } + + // Get PathItem + root := router.node() + var route *routers.Route + node, paramValues := root.Match(method + " " + remainingPath) + if node != nil { + route, _ = node.Value.(*routers.Route) + } + if route == nil { + pathItem := doc.Paths[remainingPath] + if pathItem == nil { + return nil, nil, &routers.RouteError{Reason: routers.ErrPathNotFound.Error()} + } + if pathItem.GetOperation(method) == nil { + return nil, nil, &routers.RouteError{Reason: routers.ErrMethodNotAllowed.Error()} + } + } + + if pathParams == nil { + pathParams = make(map[string]string, len(paramValues)) + } + paramKeys := node.VariableNames + for i, value := range paramValues { + key := paramKeys[i] + if strings.HasSuffix(key, "*") { + key = key[:len(key)-1] + } + pathParams[key] = value + } + return route, pathParams, nil +} diff --git a/index/server/vendor/github.com/getkin/kin-openapi/routers/types.go b/index/server/vendor/github.com/getkin/kin-openapi/routers/types.go new file mode 100644 index 00000000..93746cfe --- /dev/null +++ b/index/server/vendor/github.com/getkin/kin-openapi/routers/types.go @@ -0,0 +1,42 @@ +package routers + +import ( + "net/http" + + "github.com/getkin/kin-openapi/openapi3" +) + +// Router helps link http.Request.s and an OpenAPIv3 spec +type Router interface { + // FindRoute matches an HTTP request with the operation it resolves to. + // Hosts are matched from the OpenAPIv3 servers key. + // + // If you experience ErrPathNotFound and have localhost hosts specified as your servers, + // turning these server URLs as relative (leaving only the path) should resolve this. + // + // See openapi3filter for example uses with request and response validation. 
+ FindRoute(req *http.Request) (route *Route, pathParams map[string]string, err error) +} + +// Route describes the operation an http.Request can match +type Route struct { + Spec *openapi3.T + Server *openapi3.Server + Path string + PathItem *openapi3.PathItem + Method string + Operation *openapi3.Operation +} + +// ErrPathNotFound is returned when no route match is found +var ErrPathNotFound error = &RouteError{"no matching operation was found"} + +// ErrMethodNotAllowed is returned when no method of the matched route matches +var ErrMethodNotAllowed error = &RouteError{"method not allowed"} + +// RouteError describes Router errors +type RouteError struct { + Reason string +} + +func (e *RouteError) Error() string { return e.Reason } diff --git a/index/server/vendor/github.com/gin-gonic/gin/.golangci.yml b/index/server/vendor/github.com/gin-gonic/gin/.golangci.yml new file mode 100644 index 00000000..c5e1de38 --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/.golangci.yml @@ -0,0 +1,39 @@ +run: + timeout: 5m +linters: + enable: + - asciicheck + - depguard + - dogsled + - durationcheck + - errcheck + - errorlint + - exportloopref + - gci + - gofmt + - goimports + - gosec + - misspell + - nakedret + - nilerr + - nolintlint + - revive + - wastedassign +issues: + exclude-rules: + - linters: + - structcheck + - unused + text: "`data` is unused" + - linters: + - staticcheck + text: "SA1019:" + - linters: + - revive + text: "var-naming:" + - linters: + - revive + text: "exported:" + - path: _test\.go + linters: + - gosec # security is not make sense in tests diff --git a/index/server/vendor/github.com/gin-gonic/gin/.goreleaser.yaml b/index/server/vendor/github.com/gin-gonic/gin/.goreleaser.yaml new file mode 100644 index 00000000..e435e56a --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/.goreleaser.yaml @@ -0,0 +1,57 @@ +project_name: gin + +builds: + - + # If true, skip the build. + # Useful for library projects. + # Default is false + skip: true + +changelog: + # Set it to true if you wish to skip the changelog generation. + # This may result in an empty release notes on GitHub/GitLab/Gitea. + skip: false + + # Changelog generation implementation to use. + # + # Valid options are: + # - `git`: uses `git log`; + # - `github`: uses the compare GitHub API, appending the author login to the changelog. + # - `gitlab`: uses the compare GitLab API, appending the author name and email to the changelog. + # - `github-native`: uses the GitHub release notes generation API, disables the groups feature. + # + # Defaults to `git`. + use: git + + # Sorts the changelog by the commit's messages. + # Could either be asc, desc or empty + # Default is empty + sort: asc + + # Group commits messages by given regex and title. + # Order value defines the order of the groups. + # Proving no regex means all commits will be grouped under the default group. + # Groups are disabled when using github-native, as it already groups things by itself. + # + # Default is no groups. 
+ groups: + - title: Features + regexp: "^.*feat[(\\w)]*:+.*$" + order: 0 + - title: 'Bug fixes' + regexp: "^.*fix[(\\w)]*:+.*$" + order: 1 + - title: 'Enhancements' + regexp: "^.*chore[(\\w)]*:+.*$" + order: 2 + - title: Others + order: 999 + + filters: + # Commit messages matching the regexp listed here will be removed from + # the changelog + # Default is empty + exclude: + - '^docs' + - 'CICD' + - typo diff --git a/index/server/vendor/github.com/gin-gonic/gin/.travis.yml b/index/server/vendor/github.com/gin-gonic/gin/.travis.yml deleted file mode 100644 index bcc21414..00000000 --- a/index/server/vendor/github.com/gin-gonic/gin/.travis.yml +++ /dev/null @@ -1,48 +0,0 @@ -language: go - -matrix: - fast_finish: true - include: - - go: 1.13.x - - go: 1.13.x - env: - - TESTTAGS=nomsgpack - - go: 1.14.x - - go: 1.14.x - env: - - TESTTAGS=nomsgpack - - go: 1.15.x - - go: 1.15.x - env: - - TESTTAGS=nomsgpack - - go: master - -git: - depth: 10 - -before_install: - - if [[ "${GO111MODULE}" = "on" ]]; then mkdir "${HOME}/go"; export GOPATH="${HOME}/go"; fi - -install: - - if [[ "${GO111MODULE}" = "on" ]]; then go mod download; fi - - if [[ "${GO111MODULE}" = "on" ]]; then export PATH="${GOPATH}/bin:${GOROOT}/bin:${PATH}"; fi - - if [[ "${GO111MODULE}" = "on" ]]; then make tools; fi - -go_import_path: github.com/gin-gonic/gin - -script: - - make vet - - make fmt-check - - make misspell-check - - make test - -after_success: - - bash <(curl -s https://codecov.io/bash) - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/7f95bf605c4d356372f4 - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: false # default: false diff --git a/index/server/vendor/github.com/gin-gonic/gin/AUTHORS.md b/index/server/vendor/github.com/gin-gonic/gin/AUTHORS.md index c634e6be..b4773ef3 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/AUTHORS.md +++ b/index/server/vendor/github.com/gin-gonic/gin/AUTHORS.md @@ -2,232 +2,405 @@ List of all the awesome people working to make Gin the best Web Framework in Go. ## gin 1.x series authors -**Gin Core Team:** Bo-Yi Wu (@appleboy), 田欧 (@thinkerou), Javier Provecho (@javierprovecho) +**Gin Core Team:** Bo-Yi Wu (@appleboy), thinkerou (@thinkerou), Javier Provecho (@javierprovecho) ## gin 0.x series authors **Maintainers:** Manu Martinez-Almeida (@manucorporat), Javier Provecho (@javierprovecho) -People and companies, who have contributed, in alphabetical order. 
- -**@858806258 (杰哥)** -- Fix typo in example - - -**@achedeuzot (Klemen Sever)** -- Fix newline debug printing - - -**@adammck (Adam Mckaig)** -- Add MIT license - - -**@AlexanderChen1989 (Alexander)** -- Typos in README - - -**@alexanderdidenko (Aleksandr Didenko)** -- Add support multipart/form-data - - -**@alexandernyquist (Alexander Nyquist)** -- Using template.Must to fix multiple return issue -- ★ Added support for OPTIONS verb -- ★ Setting response headers before calling WriteHeader -- Improved documentation for model binding -- ★ Added Content.Redirect() -- ★ Added tons of Unit tests - - -**@austinheap (Austin Heap)** -- Added travis CI integration - - -**@andredublin (Andre Dublin)** -- Fix typo in comment - - -**@bredov (Ludwig Valda Vasquez)** -- Fix html templating in debug mode - - -**@bluele (Jun Kimura)** -- Fixes code examples in README - - -**@chad-russell** -- ★ Support for serializing gin.H into XML - - -**@dickeyxxx (Jeff Dickey)** -- Typos in README -- Add example about serving static files - - -**@donileo (Adonis)** -- Add NoMethod handler - - -**@dutchcoders (DutchCoders)** -- ★ Fix security bug that allows client to spoof ip -- Fix typo. r.HTMLTemplates -> SetHTMLTemplate - - -**@el3ctro- (Joshua Loper)** -- Fix typo in example - - -**@ethankan (Ethan Kan)** -- Unsigned integers in binding - - -**(Evgeny Persienko)** -- Validate sub structures - - -**@frankbille (Frank Bille)** -- Add support for HTTP Realm Auth - - -**@fmd (Fareed Dudhia)** -- Fix typo. SetHTTPTemplate -> SetHTMLTemplate - - -**@ironiridis (Christopher Harrington)** -- Remove old reference - - -**@jammie-stackhouse (Jamie Stackhouse)** -- Add more shortcuts for router methods - - -**@jasonrhansen** -- Fix spelling and grammar errors in documentation - - -**@JasonSoft (Jason Lee)** -- Fix typo in comment - - -**@joiggama (Ignacio Galindo)** -- Add utf-8 charset header on renders - - -**@julienschmidt (Julien Schmidt)** -- gofmt the code examples - - -**@kelcecil (Kel Cecil)** -- Fix readme typo - - -**@kyledinh (Kyle Dinh)** -- Adds RunTLS() - - -**@LinusU (Linus Unnebäck)** -- Small fixes in README - - -**@loongmxbt (Saint Asky)** -- Fix typo in example - - -**@lucas-clemente (Lucas Clemente)** -- ★ work around path.Join removing trailing slashes from routes - - -**@mattn (Yasuhiro Matsumoto)** -- Improve color logger - - -**@mdigger (Dmitry Sedykh)** -- Fixes Form binding when content-type is x-www-form-urlencoded -- No repeat call c.Writer.Status() in gin.Logger -- Fixes Content-Type for json render - - -**@mirzac (Mirza Ceric)** -- Fix debug printing - - -**@mopemope (Yutaka Matsubara)** -- ★ Adds Godep support (Dependencies Manager) -- Fix variadic parameter in the flexible render API -- Fix Corrupted plain render -- Add Pluggable View Renderer Example - - -**@msemenistyi (Mykyta Semenistyi)** -- update Readme.md. Add code to String method - - -**@msoedov (Sasha Myasoedov)** -- ★ Adds tons of unit tests. - - -**@ngerakines (Nick Gerakines)** -- ★ Improves API, c.GET() doesn't panic -- Adds MustGet() method - - -**@r8k (Rajiv Kilaparti)** -- Fix Port usage in README. 
- - -**@rayrod2030 (Ray Rodriguez)** -- Fix typo in example - - -**@rns** -- Fix typo in example - - -**@RobAWilkinson (Robert Wilkinson)** -- Add example of forms and params - - -**@rogierlommers (Rogier Lommers)** -- Add updated static serve example - -**@rw-access (Ross Wolf)** -- Added support to mix exact and param routes - -**@se77en (Damon Zhao)** -- Improve color logging - - -**@silasb (Silas Baronda)** -- Fixing quotes in README - - -**@SkuliOskarsson (Skuli Oskarsson)** -- Fixes some texts in README II - - -**@slimmy (Jimmy Pettersson)** -- Added messages for required bindings - - -**@smira (Andrey Smirnov)** -- Add support for ignored/unexported fields in binding - - -**@superalsrk (SRK.Lyu)** -- Update httprouter godeps - - -**@tebeka (Miki Tebeka)** -- Use net/http constants instead of numeric values - - -**@techjanitor** -- Update context.go reserved IPs - - -**@yosssi (Keiji Yoshida)** -- Fix link in README +------ +People and companies, who have contributed, in alphabetical order. -**@yuyabee** -- Fixed README +- 178inaba <178inaba@users.noreply.github.com> +- A. F +- ABHISHEK SONI +- Abhishek Chanda +- Abner Chen +- AcoNCodes +- Adam Dratwinski +- Adam Mckaig +- Adam Zielinski +- Adonis +- Alan Wang +- Albin Gilles +- Aleksandr Didenko +- Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com> +- Alex +- Alexander +- Alexander Lokhman +- Alexander Melentyev <55826637+alexander-melentyev@users.noreply.github.com> +- Alexander Nyquist +- Allen Ren +- AllinGo +- Ammar Bandukwala +- An Xiao (Luffy) +- Andre Dublin <81dublin@gmail.com> +- Andrew Szeto +- Andrey Abramov +- Andrey Nering +- Andrey Smirnov +- Andrii Bubis +- André Bazaglia +- Andy Pan +- Antoine GIRARD +- Anup Kumar Panwar <1anuppanwar@gmail.com> +- Aravinth Sundaram +- Artem +- Ashwani +- Aurelien Regat-Barrel +- Austin Heap +- Barnabus +- Bo-Yi Wu +- Boris Borshevsky +- Boyi Wu +- BradyBromley <51128276+BradyBromley@users.noreply.github.com> +- Brendan Fosberry +- Brian Wigginton +- Carlos Eduardo +- Chad Russell +- Charles +- Christian Muehlhaeuser +- Christian Persson +- Christopher Harrington +- Damon Zhao +- Dan Markham +- Dang Nguyen +- Daniel Krom +- Daniel M. Lambea +- Danieliu +- David Irvine +- David Zhang +- Davor Kapsa +- DeathKing +- Dennis Cho <47404603+forest747@users.noreply.github.com> +- Dmitry Dorogin +- Dmitry Kutakov +- Dmitry Sedykh +- Don2Quixote <35610661+Don2Quixote@users.noreply.github.com> +- Donn Pebe +- Dustin Decker +- Eason Lin +- Edward Betts +- Egor Seredin <4819888+agmt@users.noreply.github.com> +- Emmanuel Goh +- Equim +- Eren A. Akyol +- Eric_Lee +- Erik Bender +- Ethan Kan +- Evgeny Persienko +- Faisal Alam +- Fareed Dudhia +- Filip Figiel +- Florian Polster +- Frank Bille +- Franz Bettag +- Ganlv +- Gaozhen Ying +- George Gabolaev +- George Kirilenko +- Georges Varouchas +- Gordon Tyler +- Harindu Perera +- Helios <674876158@qq.com> +- Henry Kwan +- Henry Yee +- Himanshu Mishra +- Hiroyuki Tanaka +- Ibraheem Ahmed +- Ignacio Galindo +- Igor H. 
Vieira +- Ildar1111 <54001462+Ildar1111@users.noreply.github.com> +- Iskander (Alex) Sharipov +- Ismail Gjevori +- Ivan Chen +- JINNOUCHI Yasushi +- James Pettyjohn +- Jamie Stackhouse +- Jason Lee +- Javier Provecho +- Javier Provecho +- Javier Provecho +- Javier Provecho Fernandez +- Javier Provecho Fernandez +- Jean-Christophe Lebreton +- Jeff +- Jeremy Loy +- Jim Filippou +- Jimmy Pettersson +- John Bampton +- Johnny Dallas +- Johnny Dallas +- Jonathan (JC) Chen +- Josep Jesus Bigorra Algaba <42377845+averageflow@users.noreply.github.com> +- Josh Horowitz +- Joshua Loper +- Julien Schmidt +- Jun Kimura +- Justin Beckwith +- Justin Israel +- Justin Mayhew +- Jérôme Laforge +- Kacper Bąk <56700396+53jk1@users.noreply.github.com> +- Kamron Batman +- Kane Rogers +- Kaushik Neelichetty +- Keiji Yoshida +- Kel Cecil +- Kevin Mulvey +- Kevin Zhu +- Kirill Motkov +- Klemen Sever +- Kristoffer A. Iversen +- Krzysztof Szafrański +- Kumar McMillan +- Kyle Mcgill +- Lanco <35420416+lancoLiu@users.noreply.github.com> +- Levi Olson +- Lin Kao-Yuan +- Linus Unnebäck +- Lucas Clemente +- Ludwig Valda Vasquez +- Luis GG +- MW Lim +- Maksimov Sergey +- Manjusaka +- Manu MA +- Manu MA +- Manu Mtz-Almeida +- Manu Mtz.-Almeida +- Manuel Alonso +- Mara Kim +- Mario Kostelac +- Martin Karlsch +- Matt Newberry +- Matt Williams +- Matthieu MOREL +- Max Hilbrunner +- Maxime Soulé +- MetalBreaker +- Michael Puncel +- MichaelDeSteven <51652084+MichaelDeSteven@users.noreply.github.com> +- Mike <38686456+icy4ever@users.noreply.github.com> +- Mike Stipicevic +- Miki Tebeka +- Miles +- Mirza Ceric +- Mykyta Semenistyi +- Naoki Takano +- Ngalim Siregar +- Ni Hao +- Nick Gerakines +- Nikifor Seryakov +- Notealot <714804968@qq.com> +- Olivier Mengué +- Olivier Robardet +- Pablo Moncada +- Pablo Moncada +- Panmax <967168@qq.com> +- Peperoncino <2wua4nlyi@gmail.com> +- Philipp Meinen +- Pierre Massat +- Qt +- Quentin ROYER +- README Bot <35302948+codetriage-readme-bot@users.noreply.github.com> +- Rafal Zajac +- Rahul Datta Roy +- Rajiv Kilaparti +- Raphael Gavache +- Ray Rodriguez +- Regner Blok-Andersen +- Remco +- Rex Lee(李俊) +- Richard Lee +- Riverside +- Robert Wilkinson +- Rogier Lommers +- Rohan Pai +- Romain Beuque +- Roman Belyakovsky +- Roman Zaynetdinov <627197+zaynetro@users.noreply.github.com> +- Roman Zaynetdinov +- Ronald Petty +- Ross Wolf <31489089+rw-access@users.noreply.github.com> +- Roy Lou +- Rubi <14269809+codenoid@users.noreply.github.com> +- Ryan <46182144+ryanker@users.noreply.github.com> +- Ryan J. 
Yoder +- SRK.Lyu +- Sai +- Samuel Abreu +- Santhosh Kumar +- Sasha Melentyev +- Sasha Myasoedov +- Segev Finer +- Sergey Egorov +- Sergey Fedchenko +- Sergey Gonimar +- Sergey Ponomarev +- Serica <943914044@qq.com> +- Shamus Taylor +- Shilin Wang +- Shuo +- Skuli Oskarsson +- Snawoot +- Sridhar Ratnakumar +- Steeve Chailloux +- Sudhir Mishra +- Suhas Karanth +- TaeJun Park +- Tatsuya Hoshino +- Tevic +- Tevin Jeffrey +- The Gitter Badger +- Thibault Jamet +- Thomas Boerger +- Thomas Schaffer +- Tommy Chu +- Tudor Roman +- Uwe Dauernheim +- Valentine Oragbakosi +- Vas N +- Vasilyuk Vasiliy +- Victor Castell +- Vince Yuan +- Vyacheslav Dubinin +- Waynerv +- Weilin Shi <934587911@qq.com> +- Xudong Cai +- Yasuhiro Matsumoto +- Yehezkiel Syamsuhadi +- Yoshiki Nakagawa +- Yoshiyuki Kinjo +- Yue Yang +- ZYunH +- Zach Newburgh +- Zasda Yusuf Mikail +- ZhangYunHao +- ZhiFeng Hu +- Zhu Xi +- a2tt +- ahuigo <1781999+ahuigo@users.noreply.github.com> +- ali +- aljun +- andrea +- andriikushch +- anoty +- awkj +- axiaoxin <254606826@qq.com> +- bbiao +- bestgopher <84328409@qq.com> +- betahu +- bigwheel +- bn4t <17193640+bn4t@users.noreply.github.com> +- bullgare +- chainhelen +- chenyang929 +- chriswhelix +- collinmsn <4130944@qq.com> +- cssivision +- danielalves +- delphinus +- dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> +- dickeyxxx +- edebernis +- error10 +- esplo +- eudore <30709860+eudore@users.noreply.github.com> +- ffhelicopter <32922889+ffhelicopter@users.noreply.github.com> +- filikos <11477309+filikos@users.noreply.github.com> +- forging2012 +- goqihoo +- grapeVine +- guonaihong +- heige +- heige +- hellojukay +- henrylee2cn +- htobenothing +- iamhesir <78344375+iamhesir@users.noreply.github.com> +- ijaa +- ishanray +- ishanray +- itcloudy <272685110@qq.com> +- jarodsong6 +- jasonrhansen +- jincheng9 +- joeADSP <75027008+joeADSP@users.noreply.github.com> +- junfengye +- kaiiak +- kebo +- keke <19yamashita15@gmail.com> +- kishor kunal raj <68464660+kishorkunal-raj@users.noreply.github.com> +- kyledinh +- lantw44 +- likakuli <1154584512@qq.com> +- linfangrong +- linzi <873804682@qq.com> +- llgoer +- long-road <13412081338@163.com> +- mbesancon +- mehdy +- metal A-wing +- micanzhang +- minarc +- mllu +- mopemoepe +- msoedov +- mstmdev +- novaeye +- olebedev +- phithon +- pjgg +- qm012 <67568757+qm012@users.noreply.github.com> +- raymonder jin +- rns +- root@andrea:~# +- sekky0905 <20237968+sekky0905@users.noreply.github.com> +- senhtry +- shadrus +- silasb +- solos +- songjiayang +- sope +- srt180 <30768686+srt180@users.noreply.github.com> +- stackerzzq +- sunshineplan +- syssam +- techjanitor +- techjanitor +- thinkerou +- thinkgo <49174849+thinkgos@users.noreply.github.com> +- tsirolnik +- tyltr <31768692+tylitianrui@users.noreply.github.com> +- vinhha96 +- voidman +- vz +- wei +- weibaohui +- whirosan +- willnewrelic +- wssccc +- wuhuizuo +- xyb +- y-yagi +- yiranzai +- youzeliang +- yugu +- yuyabe +- zebozhuang +- zero11-0203 <93071220+zero11-0203@users.noreply.github.com> +- zesani <7sin@outlook.co.th> +- zhanweidu +- zhing +- ziheng +- zzjin +- 森 優太 <59682979+uta-mori@users.noreply.github.com> +- 杰哥 <858806258@qq.com> +- 涛叔 +- 市民233 +- 尹宝强 +- 梦溪笔谈 +- 飞雪无情 +- 寻寻觅觅的Gopher diff --git a/index/server/vendor/github.com/gin-gonic/gin/CHANGELOG.md b/index/server/vendor/github.com/gin-gonic/gin/CHANGELOG.md index 4c806a5a..1bc51a8c 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/CHANGELOG.md +++ 
b/index/server/vendor/github.com/gin-gonic/gin/CHANGELOG.md @@ -1,5 +1,53 @@ # Gin ChangeLog +## Gin v1.8.1 + +### ENHANCEMENTS + +* feat(context): add ContextWithFallback feature flag [#3172](https://github.com/gin-gonic/gin/pull/3172) + +## Gin v1.8.0 + +## Break Changes + +* TrustedProxies: Add default IPv6 support and refactor [#2967](https://github.com/gin-gonic/gin/pull/2967). Please replace `RemoteIP() (net.IP, bool)` with `RemoteIP() net.IP` +* gin.Context with fallback value from gin.Context.Request.Context() [#2751](https://github.com/gin-gonic/gin/pull/2751) + +### BUGFIXES + +* Fixed SetOutput() panics on go 1.17 [#2861](https://github.com/gin-gonic/gin/pull/2861) +* Fix: wrong when wildcard follows named param [#2983](https://github.com/gin-gonic/gin/pull/2983) +* Fix: missing sameSite when do context.reset() [#3123](https://github.com/gin-gonic/gin/pull/3123) + +### ENHANCEMENTS + +* Use Header() instead of deprecated HeaderMap [#2694](https://github.com/gin-gonic/gin/pull/2694) +* RouterGroup.Handle regular match optimization of http method [#2685](https://github.com/gin-gonic/gin/pull/2685) +* Add support go-json, another drop-in json replacement [#2680](https://github.com/gin-gonic/gin/pull/2680) +* Use errors.New to replace fmt.Errorf will much better [#2707](https://github.com/gin-gonic/gin/pull/2707) +* Use Duration.Truncate for truncating precision [#2711](https://github.com/gin-gonic/gin/pull/2711) +* Get client IP when using Cloudflare [#2723](https://github.com/gin-gonic/gin/pull/2723) +* Optimize code adjust [#2700](https://github.com/gin-gonic/gin/pull/2700/files) +* Optimize code and reduce code cyclomatic complexity [#2737](https://github.com/gin-gonic/gin/pull/2737) +* Improve sliceValidateError.Error performance [#2765](https://github.com/gin-gonic/gin/pull/2765) +* Support custom struct tag [#2720](https://github.com/gin-gonic/gin/pull/2720) +* Improve router group tests [#2787](https://github.com/gin-gonic/gin/pull/2787) +* Fallback Context.Deadline() Context.Done() Context.Err() to Context.Request.Context() [#2769](https://github.com/gin-gonic/gin/pull/2769) +* Some codes optimize [#2830](https://github.com/gin-gonic/gin/pull/2830) [#2834](https://github.com/gin-gonic/gin/pull/2834) [#2838](https://github.com/gin-gonic/gin/pull/2838) [#2837](https://github.com/gin-gonic/gin/pull/2837) [#2788](https://github.com/gin-gonic/gin/pull/2788) [#2848](https://github.com/gin-gonic/gin/pull/2848) [#2851](https://github.com/gin-gonic/gin/pull/2851) [#2701](https://github.com/gin-gonic/gin/pull/2701) +* TrustedProxies: Add default IPv6 support and refactor [#2967](https://github.com/gin-gonic/gin/pull/2967) +* Test(route): expose performRequest func [#3012](https://github.com/gin-gonic/gin/pull/3012) +* Support h2c with prior knowledge [#1398](https://github.com/gin-gonic/gin/pull/1398) +* Feat attachment filename support utf8 [#3071](https://github.com/gin-gonic/gin/pull/3071) +* Feat: add StaticFileFS [#2749](https://github.com/gin-gonic/gin/pull/2749) +* Feat(context): return GIN Context from Value method [#2825](https://github.com/gin-gonic/gin/pull/2825) +* Feat: automatically SetMode to TestMode when run go test [#3139](https://github.com/gin-gonic/gin/pull/3139) +* Add TOML bining for gin 
[#3081](https://github.com/gin-gonic/gin/pull/3081) +* IPv6 add default trusted proxies [#3033](https://github.com/gin-gonic/gin/pull/3033) + +### DOCS + +* Add note about nomsgpack tag to the readme [#2703](https://github.com/gin-gonic/gin/pull/2703) + ## Gin v1.7.7 ### BUGFIXES diff --git a/index/server/vendor/github.com/gin-gonic/gin/CONTRIBUTING.md b/index/server/vendor/github.com/gin-gonic/gin/CONTRIBUTING.md index 97daa808..d1c723c6 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/CONTRIBUTING.md +++ b/index/server/vendor/github.com/gin-gonic/gin/CONTRIBUTING.md @@ -8,6 +8,6 @@ - With pull requests: - Open your pull request against `master` - Your pull request should have no more than two commits, if not you should squash them. - - It should pass all tests in the available continuous integration systems such as TravisCI. + - It should pass all tests in the available continuous integration systems such as GitHub Actions. - You should add/modify tests to cover your proposed code changes. - If your pull request contains a new feature, please document it on the README. diff --git a/index/server/vendor/github.com/gin-gonic/gin/Makefile b/index/server/vendor/github.com/gin-gonic/gin/Makefile index 1a991939..5d55b444 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/Makefile +++ b/index/server/vendor/github.com/gin-gonic/gin/Makefile @@ -1,5 +1,6 @@ GO ?= go GOFMT ?= gofmt "-s" +GO_VERSION=$(shell $(GO) version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f2) PACKAGES ?= $(shell $(GO) list ./...) VETPACKAGES ?= $(shell $(GO) list ./... | grep -v /examples/) GOFILES := $(shell find . -name "*.go") @@ -67,5 +68,10 @@ misspell: .PHONY: tools tools: - go install golang.org/x/lint/golint; \ - go install github.com/client9/misspell/cmd/misspell; + @if [ $(GO_VERSION) -gt 15 ]; then \ + $(GO) install golang.org/x/lint/golint@latest; \ + $(GO) install github.com/client9/misspell/cmd/misspell@latest; \ + elif [ $(GO_VERSION) -lt 16 ]; then \ + $(GO) install golang.org/x/lint/golint; \ + $(GO) install github.com/client9/misspell/cmd/misspell; \ + fi diff --git a/index/server/vendor/github.com/gin-gonic/gin/README.md b/index/server/vendor/github.com/gin-gonic/gin/README.md index 9bf459b0..5cc8321b 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/README.md +++ b/index/server/vendor/github.com/gin-gonic/gin/README.md @@ -2,7 +2,7 @@ -[![Build Status](https://travis-ci.org/gin-gonic/gin.svg)](https://travis-ci.org/gin-gonic/gin) +[![Build Status](https://github.com/gin-gonic/gin/workflows/Run%20Tests/badge.svg?branch=master)](https://github.com/gin-gonic/gin/actions?query=branch%3Amaster) [![codecov](https://codecov.io/gh/gin-gonic/gin/branch/master/graph/badge.svg)](https://codecov.io/gh/gin-gonic/gin) [![Go Report Card](https://goreportcard.com/badge/github.com/gin-gonic/gin)](https://goreportcard.com/report/github.com/gin-gonic/gin) [![GoDoc](https://pkg.go.dev/badge/github.com/gin-gonic/gin?status.svg)](https://pkg.go.dev/github.com/gin-gonic/gin?tab=doc) @@ -23,7 +23,8 @@ Gin is a web framework written in Go (Golang). It features a martini-like API wi - [Quick start](#quick-start) - [Benchmarks](#benchmarks) - [Gin v1. 
stable](#gin-v1-stable) - - [Build with jsoniter](#build-with-jsoniter) + - [Build with jsoniter/go-json](#build-with-json-replacement) + - [Build without `MsgPack` rendering feature](#build-without-msgpack-rendering-feature) - [API Examples](#api-examples) - [Using GET, POST, PUT, PATCH, DELETE and OPTIONS](#using-get-post-put-patch-delete-and-options) - [Parameters in path](#parameters-in-path) @@ -77,7 +78,7 @@ Gin is a web framework written in Go (Golang). It features a martini-like API wi - [http2 server push](#http2-server-push) - [Define format for the log of routes](#define-format-for-the-log-of-routes) - [Set and get a cookie](#set-and-get-a-cookie) - - [Don't trust all proxies](#don't-trust-all-proxies) + - [Don't trust all proxies](#dont-trust-all-proxies) - [Testing](#testing) - [Users](#users) @@ -85,7 +86,7 @@ Gin is a web framework written in Go (Golang). It features a martini-like API wi To install Gin package, you need to install Go and set your Go workspace first. -1. The first need [Go](https://golang.org/) installed (**version 1.13+ is required**), then you can use the below Go command to install Gin. +1. You first need [Go](https://golang.org/) installed (**version 1.14+ is required**), then you can use the below Go command to install Gin. ```sh $ go get -u github.com/gin-gonic/gin @@ -113,12 +114,16 @@ $ cat example.go ```go package main -import "github.com/gin-gonic/gin" +import ( + "net/http" + + "github.com/gin-gonic/gin" +) func main() { r := gin.Default() r.GET("/ping", func(c *gin.Context) { - c.JSON(200, gin.H{ + c.JSON(http.StatusOK, gin.H{ "message": "pong", }) }) @@ -183,13 +188,28 @@ Gin uses a custom version of [HttpRouter](https://github.com/julienschmidt/httpr - [x] Battle tested. - [x] API frozen, new releases will not break your code. -## Build with [jsoniter](https://github.com/json-iterator/go) +## Build with json replacement -Gin uses `encoding/json` as default json package but you can change to [jsoniter](https://github.com/json-iterator/go) by build from other tags. +Gin uses `encoding/json` as default json package but you can change it by build from other tags. +[jsoniter](https://github.com/json-iterator/go) ```sh $ go build -tags=jsoniter . ``` +[go-json](https://github.com/goccy/go-json) +```sh +$ go build -tags=go_json . +``` + +## Build without `MsgPack` rendering feature + +Gin enables `MsgPack` rendering feature by default. But you can disable this feature by specifying `nomsgpack` build tag. + +```sh +$ go build -tags=nomsgpack . +``` + +This is useful to reduce the binary size of executable files. See the [detail information](https://github.com/gin-gonic/gin/pull/1852). ## API Examples @@ -241,14 +261,15 @@ func main() { // For each matched request Context will hold the route definition router.POST("/user/:name/*action", func(c *gin.Context) { - c.FullPath() == "/user/:name/*action" // true + b := c.FullPath() == "/user/:name/*action" // true + c.String(http.StatusOK, "%t", b) }) // This handler will add a new router for /user/groups. // Exact routes are resolved before param routes, regardless of the order they were defined. // Routes starting with /user/groups are never interpreted as /user/:name/... 
routes router.GET("/user/groups", func(c *gin.Context) { - c.String(http.StatusOK, "The available groups are [...]", name) + c.String(http.StatusOK, "The available groups are [...]") }) router.Run(":8080") @@ -283,7 +304,7 @@ func main() { message := c.PostForm("message") nick := c.DefaultPostForm("nick", "anonymous") - c.JSON(200, gin.H{ + c.JSON(http.StatusOK, gin.H{ "status": "posted", "message": message, "nick": nick, @@ -367,7 +388,7 @@ func main() { // Set a lower memory limit for multipart forms (default is 32 MiB) router.MaxMultipartMemory = 8 << 20 // 8 MiB router.POST("/upload", func(c *gin.Context) { - // single file + // Single file file, _ := c.FormFile("file") log.Println(file.Filename) @@ -496,6 +517,7 @@ func main() { // nested group testing := authorized.Group("testing") + // visit 0.0.0.0:8080/testing/analytics testing.GET("/analytics", analyticsEndpoint) } @@ -552,7 +574,7 @@ func main() { router := gin.Default() router.GET("/ping", func(c *gin.Context) { - c.String(200, "pong") + c.String(http.StatusOK, "pong") })    router.Run(":8080") @@ -584,7 +606,7 @@ func main() { router.Use(gin.Recovery()) router.GET("/ping", func(c *gin.Context) { - c.String(200, "pong") + c.String(http.StatusOK, "pong") }) router.Run(":8080") @@ -612,7 +634,7 @@ func main() { router := gin.Default() router.GET("/ping", func(c *gin.Context) { - c.String(200, "pong") + c.String(http.StatusOK, "pong") }) router.Run(":8080") @@ -631,7 +653,7 @@ func main() { router := gin.Default() router.GET("/ping", func(c *gin.Context) { - c.String(200, "pong") + c.String(http.StatusOK, "pong") }) router.Run(":8080") @@ -640,7 +662,7 @@ func main() { ### Model binding and validation -To bind a request body into a type, use model binding. We currently support binding of JSON, XML, YAML and standard form values (foo=bar&boo=baz). +To bind a request body into a type, use model binding. We currently support binding of JSON, XML, YAML, TOML and standard form values (foo=bar&boo=baz). Gin uses [**go-playground/validator/v10**](https://github.com/go-playground/validator) for validation. Check the full docs on tags usage [here](https://godoc.org/github.com/go-playground/validator#hdr-Baked_In_Validators_and_Tags). @@ -648,10 +670,10 @@ Note that you need to set the corresponding binding tag on all fields you want t Also, Gin provides two sets of methods for binding: - **Type** - Must bind - - **Methods** - `Bind`, `BindJSON`, `BindXML`, `BindQuery`, `BindYAML`, `BindHeader` + - **Methods** - `Bind`, `BindJSON`, `BindXML`, `BindQuery`, `BindYAML`, `BindHeader`, `BindTOML` - **Behavior** - These methods use `MustBindWith` under the hood. If there is a binding error, the request is aborted with `c.AbortWithError(400, err).SetType(ErrorTypeBind)`. This sets the response status code to 400 and the `Content-Type` header is set to `text/plain; charset=utf-8`. Note that if you try to set the response code after this, it will result in a warning `[GIN-debug] [WARNING] Headers were already written. Wanted to override status code 400 with 422`. If you wish to have greater control over the behavior, consider using the `ShouldBind` equivalent method. - **Type** - Should bind - - **Methods** - `ShouldBind`, `ShouldBindJSON`, `ShouldBindXML`, `ShouldBindQuery`, `ShouldBindYAML`, `ShouldBindHeader` + - **Methods** - `ShouldBind`, `ShouldBindJSON`, `ShouldBindXML`, `ShouldBindQuery`, `ShouldBindYAML`, `ShouldBindHeader`, `ShouldBindTOML`, - **Behavior** - These methods use `ShouldBindWith` under the hood. 
If there is a binding error, the error is returned and it is the developer's responsibility to handle the request and error appropriately. When using the Bind-method, Gin tries to infer the binder depending on the Content-Type header. If you are sure what you are binding, you can use `MustBindWith` or `ShouldBindWith`. @@ -687,7 +709,7 @@ func main() { // Example for binding XML ( // // - // user + // manu // 123 // ) router.POST("/loginXML", func(c *gin.Context) { @@ -830,6 +852,7 @@ package main import ( "log" + "net/http" "github.com/gin-gonic/gin" ) @@ -852,7 +875,7 @@ func startPage(c *gin.Context) { log.Println(person.Name) log.Println(person.Address) } - c.String(200, "Success") + c.String(http.StatusOK, "Success") } ``` @@ -866,6 +889,7 @@ package main import ( "log" + "net/http" "time" "github.com/gin-gonic/gin" @@ -889,7 +913,7 @@ func startPage(c *gin.Context) { var person Person // If `GET`, only `Form` binding engine (`query`) used. // If `POST`, first checks the `content-type` for `JSON` or `XML`, then uses `Form` (`form-data`). - // See more at https://github.com/gin-gonic/gin/blob/master/binding/binding.go#L48 + // See more at https://github.com/gin-gonic/gin/blob/master/binding/binding.go#L88 if c.ShouldBind(&person) == nil { log.Println(person.Name) log.Println(person.Address) @@ -898,7 +922,7 @@ func startPage(c *gin.Context) { log.Println(person.UnixTime) } - c.String(200, "Success") + c.String(http.StatusOK, "Success") } ``` @@ -914,7 +938,11 @@ See the [detail information](https://github.com/gin-gonic/gin/issues/846). ```go package main -import "github.com/gin-gonic/gin" +import ( + "net/http" + + "github.com/gin-gonic/gin" +) type Person struct { ID string `uri:"id" binding:"required,uuid"` @@ -926,10 +954,10 @@ func main() { route.GET("/:name/:id", func(c *gin.Context) { var person Person if err := c.ShouldBindUri(&person); err != nil { - c.JSON(400, gin.H{"msg": err}) + c.JSON(http.StatusBadRequest, gin.H{"msg": err.Error()}) return } - c.JSON(200, gin.H{"name": person.Name, "uuid": person.ID}) + c.JSON(http.StatusOK, gin.H{"name": person.Name, "uuid": person.ID}) }) route.Run(":8088") } @@ -948,6 +976,8 @@ package main import ( "fmt" + "net/http" + "github.com/gin-gonic/gin" ) @@ -962,11 +992,11 @@ func main() { h := testHeader{} if err := c.ShouldBindHeader(&h); err != nil { - c.JSON(200, err) + c.JSON(http.StatusOK, err) } fmt.Printf("%#v\n", h) - c.JSON(200, gin.H{"Rate": h.Rate, "Domain": h.Domain}) + c.JSON(http.StatusOK, gin.H{"Rate": h.Rate, "Domain": h.Domain}) }) r.Run() @@ -996,7 +1026,7 @@ type myForm struct { func formHandler(c *gin.Context) { var fakeForm myForm c.ShouldBind(&fakeForm) - c.JSON(200, gin.H{"color": fakeForm.Colors}) + c.JSON(http.StatusOK, gin.H{"color": fakeForm.Colors}) } ... 
@@ -1201,14 +1231,14 @@ func main() { // Serves unicode entities r.GET("/json", func(c *gin.Context) { - c.JSON(200, gin.H{ + c.JSON(http.StatusOK, gin.H{ "html": "Hello, world!", }) }) // Serves literal characters r.GET("/purejson", func(c *gin.Context) { - c.PureJSON(200, gin.H{ + c.PureJSON(http.StatusOK, gin.H{ "html": "Hello, world!", }) }) @@ -1226,7 +1256,8 @@ func main() { router.Static("/assets", "./assets") router.StaticFS("/more_static", http.Dir("my_file_system")) router.StaticFile("/favicon.ico", "./resources/favicon.ico") - + router.StaticFileFS("/more_favicon.ico", "more_favicon.ico", http.Dir("my_file_system")) + // Listen and serve on 0.0.0.0:8080 router.Run(":8080") } @@ -1392,7 +1423,7 @@ import ( func formatAsDate(t time.Time) string { year, month, day := t.Date() - return fmt.Sprintf("%d%02d/%02d", year, month, day) + return fmt.Sprintf("%d/%02d/%02d", year, month, day) } func main() { @@ -1454,7 +1485,7 @@ r.GET("/test", func(c *gin.Context) { r.HandleContext(c) }) r.GET("/test2", func(c *gin.Context) { - c.JSON(200, gin.H{"hello": "world"}) + c.JSON(http.StatusOK, gin.H{"hello": "world"}) }) ``` @@ -1607,6 +1638,7 @@ package main import ( "log" + "net/http" "github.com/gin-gonic/autotls" "github.com/gin-gonic/gin" @@ -1617,7 +1649,7 @@ func main() { // Ping handler r.GET("/ping", func(c *gin.Context) { - c.String(200, "pong") + c.String(http.StatusOK, "pong") }) log.Fatal(autotls.Run(r, "example1.com", "example2.com")) @@ -1631,6 +1663,7 @@ package main import ( "log" + "net/http" "github.com/gin-gonic/autotls" "github.com/gin-gonic/gin" @@ -1642,7 +1675,7 @@ func main() { // Ping handler r.GET("/ping", func(c *gin.Context) { - c.String(200, "pong") + c.String(http.StatusOK, "pong") }) m := autocert.Manager{ @@ -1811,7 +1844,7 @@ func main() { quit := make(chan os.Signal) // kill (no param) default send syscall.SIGTERM // kill -2 is syscall.SIGINT - // kill -9 is syscall.SIGKILL but can't be catch, so don't need add it + // kill -9 is syscall.SIGKILL but can't be caught, so don't need to add it signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) <-quit log.Println("Shutting down server...") @@ -1903,7 +1936,7 @@ type StructD struct { func GetDataB(c *gin.Context) { var b StructB c.Bind(&b) - c.JSON(200, gin.H{ + c.JSON(http.StatusOK, gin.H{ "a": b.NestedStruct, "b": b.FieldB, }) @@ -1912,7 +1945,7 @@ func GetDataB(c *gin.Context) { func GetDataC(c *gin.Context) { var b StructC c.Bind(&b) - c.JSON(200, gin.H{ + c.JSON(http.StatusOK, gin.H{ "a": b.NestedStructPointer, "c": b.FieldC, }) @@ -1921,7 +1954,7 @@ func GetDataC(c *gin.Context) { func GetDataD(c *gin.Context) { var b StructD c.Bind(&b) - c.JSON(200, gin.H{ + c.JSON(http.StatusOK, gin.H{ "x": b.NestedAnonyStruct, "d": b.FieldD, }) @@ -1984,7 +2017,7 @@ func SomeHandler(c *gin.Context) { objA := formA{} objB := formB{} // This reads c.Request.Body and stores the result into the context. - if errA := c.ShouldBindBodyWith(&objA, binding.JSON); errA == nil { + if errA := c.ShouldBindBodyWith(&objA, binding.Form); errA == nil { c.String(http.StatusOK, `the body should be formA`) // At this time, it reuses body stored in the context. } else if errB := c.ShouldBindBodyWith(&objB, binding.JSON); errB == nil { @@ -2006,6 +2039,61 @@ enough to call binding at once. can be called by `c.ShouldBind()` multiple times without any damage to performance (See [#1341](https://github.com/gin-gonic/gin/pull/1341)). 
+### Bind form-data request with custom struct and custom tag + +```go +const ( + customerTag = "url" + defaultMemory = 32 << 20 +) + +type customerBinding struct {} + +func (customerBinding) Name() string { + return "form" +} + +func (customerBinding) Bind(req *http.Request, obj interface{}) error { + if err := req.ParseForm(); err != nil { + return err + } + if err := req.ParseMultipartForm(defaultMemory); err != nil { + if err != http.ErrNotMultipart { + return err + } + } + if err := binding.MapFormWithTag(obj, req.Form, customerTag); err != nil { + return err + } + return validate(obj) +} + +func validate(obj interface{}) error { + if binding.Validator == nil { + return nil + } + return binding.Validator.ValidateStruct(obj) +} + +// Now we can do this!!! +// FormA is a external type that we can't modify it's tag +type FormA struct { + FieldA string `url:"field_a"` +} + +func ListHandler(s *Service) func(ctx *gin.Context) { + return func(ctx *gin.Context) { + var urlBinding = customerBinding{} + var opt FormA + err := ctx.MustBindWith(&opt, urlBinding) + if err != nil { + ... + } + ... + } +} +``` + ### http2 server push http.Pusher is supported only **go1.8+**. See the [golang blog](https://blog.golang.org/h2push) for detail information. @@ -2016,6 +2104,7 @@ package main import ( "html/template" "log" + "net/http" "github.com/gin-gonic/gin" ) @@ -2044,7 +2133,7 @@ func main() { log.Printf("Failed to push: %v", err) } } - c.HTML(200, "https", gin.H{ + c.HTML(http.StatusOK, "https", gin.H{ "status": "success", }) }) @@ -2200,10 +2289,16 @@ The `net/http/httptest` package is preferable way for HTTP testing. ```go package main +import ( + "net/http" + + "github.com/gin-gonic/gin" +) + func setupRouter() *gin.Engine { r := gin.Default() r.GET("/ping", func(c *gin.Context) { - c.String(200, "pong") + c.String(http.StatusOK, "pong") }) return r } @@ -2231,10 +2326,10 @@ func TestPingRoute(t *testing.T) { router := setupRouter() w := httptest.NewRecorder() - req, _ := http.NewRequest("GET", "/ping", nil) + req, _ := http.NewRequest(http.MethodGet, "/ping", nil) router.ServeHTTP(w, req) - assert.Equal(t, 200, w.Code) + assert.Equal(t, http.StatusOK, w.Code) assert.Equal(t, "pong", w.Body.String()) } ``` @@ -2250,3 +2345,4 @@ Awesome project lists using [Gin](https://github.com/gin-gonic/gin) web framewor * [picfit](https://github.com/thoas/picfit): An image resizing server written in Go. * [brigade](https://github.com/brigadecore/brigade): Event-based Scripting for Kubernetes. * [dkron](https://github.com/distribworks/dkron): Distributed, fault tolerant job scheduling system. + diff --git a/index/server/vendor/github.com/gin-gonic/gin/any.go b/index/server/vendor/github.com/gin-gonic/gin/any.go new file mode 100644 index 00000000..42b1ea46 --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/any.go @@ -0,0 +1,10 @@ +// Copyright 2022 Gin Core Team. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. + +//go:build !go1.18 +// +build !go1.18 + +package gin + +type any = interface{} diff --git a/index/server/vendor/github.com/gin-gonic/gin/auth.go b/index/server/vendor/github.com/gin-gonic/gin/auth.go index 4d8a6ce4..2503c515 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/auth.go +++ b/index/server/vendor/github.com/gin-gonic/gin/auth.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. 
All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -31,7 +31,7 @@ func (a authPairs) searchCredential(authValue string) (string, bool) { return "", false } for _, pair := range a { - if subtle.ConstantTimeCompare([]byte(pair.value), []byte(authValue)) == 1 { + if subtle.ConstantTimeCompare(bytesconv.StringToBytes(pair.value), bytesconv.StringToBytes(authValue)) == 1 { return pair.user, true } } diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/any.go b/index/server/vendor/github.com/gin-gonic/gin/binding/any.go new file mode 100644 index 00000000..d8251a7c --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/any.go @@ -0,0 +1,10 @@ +// Copyright 2022 Gin Core Team. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. + +//go:build !go1.18 +// +build !go1.18 + +package binding + +type any = interface{} diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/binding.go b/index/server/vendor/github.com/gin-gonic/gin/binding/binding.go index 5caeb581..a58924ed 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/binding.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/binding.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -22,6 +22,7 @@ const ( MIMEMSGPACK = "application/x-msgpack" MIMEMSGPACK2 = "application/msgpack" MIMEYAML = "application/x-yaml" + MIMETOML = "application/toml" ) // Binding describes the interface which needs to be implemented for binding the @@ -29,27 +30,27 @@ const ( // the form POST. type Binding interface { Name() string - Bind(*http.Request, interface{}) error + Bind(*http.Request, any) error } // BindingBody adds BindBody method to Binding. BindBody is similar with Bind, // but it reads the body from supplied bytes instead of req.Body. type BindingBody interface { Binding - BindBody([]byte, interface{}) error + BindBody([]byte, any) error } // BindingUri adds BindUri method to Binding. BindUri is similar with Bind, -// but it read the Params. +// but it reads the Params. type BindingUri interface { Name() string - BindUri(map[string][]string, interface{}) error + BindUri(map[string][]string, any) error } // StructValidator is the minimal interface which needs to be implemented in // order for it to be used as the validator engine for ensuring the correctness // of the request. Gin provides a default implementation for this using -// https://github.com/go-playground/validator/tree/v8.18.2. +// https://github.com/go-playground/validator/tree/v10.6.1. type StructValidator interface { // ValidateStruct can receive any kind of type and it should never panic, even if the configuration is not right. // If the received type is a slice|array, the validation should be performed travel on every element. @@ -57,15 +58,15 @@ type StructValidator interface { // If the received type is a struct or pointer to a struct, the validation should be performed. // If the struct is not valid or the validation itself fails, a descriptive error should be returned. // Otherwise nil must be returned. 
- ValidateStruct(interface{}) error + ValidateStruct(any) error // Engine returns the underlying validator engine which powers the // StructValidator implementation. - Engine() interface{} + Engine() any } // Validator is the default validator which implements the StructValidator -// interface. It uses https://github.com/go-playground/validator/tree/v8.18.2 +// interface. It uses https://github.com/go-playground/validator/tree/v10.6.1 // under the hood. var Validator StructValidator = &defaultValidator{} @@ -83,6 +84,7 @@ var ( YAML = yamlBinding{} Uri = uriBinding{} Header = headerBinding{} + TOML = tomlBinding{} ) // Default returns the appropriate Binding instance based on the HTTP method @@ -103,6 +105,8 @@ func Default(method, contentType string) Binding { return MsgPack case MIMEYAML: return YAML + case MIMETOML: + return TOML case MIMEMultipartPOSTForm: return FormMultipart default: // case MIMEPOSTForm: @@ -110,7 +114,7 @@ func Default(method, contentType string) Binding { } } -func validate(obj interface{}) error { +func validate(obj any) error { if Validator == nil { return nil } diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go b/index/server/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go index 9afa3dcf..7f6a904a 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go @@ -20,6 +20,7 @@ const ( MIMEMultipartPOSTForm = "multipart/form-data" MIMEPROTOBUF = "application/x-protobuf" MIMEYAML = "application/x-yaml" + MIMETOML = "application/toml" ) // Binding describes the interface which needs to be implemented for binding the @@ -27,42 +28,42 @@ const ( // the form POST. type Binding interface { Name() string - Bind(*http.Request, interface{}) error + Bind(*http.Request, any) error } // BindingBody adds BindBody method to Binding. BindBody is similar with Bind, // but it reads the body from supplied bytes instead of req.Body. type BindingBody interface { Binding - BindBody([]byte, interface{}) error + BindBody([]byte, any) error } // BindingUri adds BindUri method to Binding. BindUri is similar with Bind, -// but it read the Params. +// but it reads the Params. type BindingUri interface { Name() string - BindUri(map[string][]string, interface{}) error + BindUri(map[string][]string, any) error } // StructValidator is the minimal interface which needs to be implemented in // order for it to be used as the validator engine for ensuring the correctness // of the request. Gin provides a default implementation for this using -// https://github.com/go-playground/validator/tree/v8.18.2. +// https://github.com/go-playground/validator/tree/v10.6.1. type StructValidator interface { // ValidateStruct can receive any kind of type and it should never panic, even if the configuration is not right. // If the received type is not a struct, any validation should be skipped and nil must be returned. // If the received type is a struct or pointer to a struct, the validation should be performed. // If the struct is not valid or the validation itself fails, a descriptive error should be returned. // Otherwise nil must be returned. - ValidateStruct(interface{}) error + ValidateStruct(any) error // Engine returns the underlying validator engine which powers the // StructValidator implementation. 
- Engine() interface{} + Engine() any } // Validator is the default validator which implements the StructValidator -// interface. It uses https://github.com/go-playground/validator/tree/v8.18.2 +// interface. It uses https://github.com/go-playground/validator/tree/v10.6.1 // under the hood. var Validator StructValidator = &defaultValidator{} @@ -79,6 +80,7 @@ var ( YAML = yamlBinding{} Uri = uriBinding{} Header = headerBinding{} + TOML = tomlBinding{} ) // Default returns the appropriate Binding instance based on the HTTP method @@ -99,12 +101,14 @@ func Default(method, contentType string) Binding { return YAML case MIMEMultipartPOSTForm: return FormMultipart + case MIMETOML: + return TOML default: // case MIMEPOSTForm: return Form } } -func validate(obj interface{}) error { +func validate(obj any) error { if Validator == nil { return nil } diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/default_validator.go b/index/server/vendor/github.com/gin-gonic/gin/binding/default_validator.go index c57a120f..c03afe75 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/default_validator.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/default_validator.go @@ -1,4 +1,4 @@ -// Copyright 2017 Manu Martinez-Almeida. All rights reserved. +// Copyright 2017 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -18,23 +18,35 @@ type defaultValidator struct { validate *validator.Validate } -type sliceValidateError []error +type SliceValidationError []error -func (err sliceValidateError) Error() string { - var errMsgs []string - for i, e := range err { - if e == nil { - continue +// Error concatenates all error elements in SliceValidationError into a single string separated by \n. +func (err SliceValidationError) Error() string { + n := len(err) + switch n { + case 0: + return "" + default: + var b strings.Builder + if err[0] != nil { + fmt.Fprintf(&b, "[%d]: %s", 0, err[0].Error()) + } + if n > 1 { + for i := 1; i < n; i++ { + if err[i] != nil { + b.WriteString("\n") + fmt.Fprintf(&b, "[%d]: %s", i, err[i].Error()) + } + } } - errMsgs = append(errMsgs, fmt.Sprintf("[%d]: %s", i, e.Error())) + return b.String() } - return strings.Join(errMsgs, "\n") } var _ StructValidator = &defaultValidator{} // ValidateStruct receives any kind of type, but only performed struct or pointer to struct type. -func (v *defaultValidator) ValidateStruct(obj interface{}) error { +func (v *defaultValidator) ValidateStruct(obj any) error { if obj == nil { return nil } @@ -47,7 +59,7 @@ func (v *defaultValidator) ValidateStruct(obj interface{}) error { return v.validateStruct(obj) case reflect.Slice, reflect.Array: count := value.Len() - validateRet := make(sliceValidateError, 0) + validateRet := make(SliceValidationError, 0) for i := 0; i < count; i++ { if err := v.ValidateStruct(value.Index(i).Interface()); err != nil { validateRet = append(validateRet, err) @@ -63,7 +75,7 @@ func (v *defaultValidator) ValidateStruct(obj interface{}) error { } // validateStruct receives struct type -func (v *defaultValidator) validateStruct(obj interface{}) error { +func (v *defaultValidator) validateStruct(obj any) error { v.lazyinit() return v.validate.Struct(obj) } @@ -71,8 +83,8 @@ func (v *defaultValidator) validateStruct(obj interface{}) error { // Engine returns the underlying validator engine which powers the default // Validator instance. 
This is useful if you want to register custom validations // or struct level validations. See validator GoDoc for more info - -// https://godoc.org/gopkg.in/go-playground/validator.v8 -func (v *defaultValidator) Engine() interface{} { +// https://pkg.go.dev/github.com/go-playground/validator/v10 +func (v *defaultValidator) Engine() any { v.lazyinit() return v.validate } diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/form.go b/index/server/vendor/github.com/gin-gonic/gin/binding/form.go index b93c34cf..b17352ba 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/form.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/form.go @@ -1,10 +1,11 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. package binding import ( + "errors" "net/http" ) @@ -18,14 +19,12 @@ func (formBinding) Name() string { return "form" } -func (formBinding) Bind(req *http.Request, obj interface{}) error { +func (formBinding) Bind(req *http.Request, obj any) error { if err := req.ParseForm(); err != nil { return err } - if err := req.ParseMultipartForm(defaultMemory); err != nil { - if err != http.ErrNotMultipart { - return err - } + if err := req.ParseMultipartForm(defaultMemory); err != nil && !errors.Is(err, http.ErrNotMultipart) { + return err } if err := mapForm(obj, req.Form); err != nil { return err @@ -37,7 +36,7 @@ func (formPostBinding) Name() string { return "form-urlencoded" } -func (formPostBinding) Bind(req *http.Request, obj interface{}) error { +func (formPostBinding) Bind(req *http.Request, obj any) error { if err := req.ParseForm(); err != nil { return err } @@ -51,7 +50,7 @@ func (formMultipartBinding) Name() string { return "multipart/form-data" } -func (formMultipartBinding) Bind(req *http.Request, obj interface{}) error { +func (formMultipartBinding) Bind(req *http.Request, obj any) error { if err := req.ParseMultipartForm(defaultMemory); err != nil { return err } diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/form_mapping.go b/index/server/vendor/github.com/gin-gonic/gin/binding/form_mapping.go index 2f4e45b4..98cebfec 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/form_mapping.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/form_mapping.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. 
@@ -16,22 +16,34 @@ import ( "github.com/gin-gonic/gin/internal/json" ) -var errUnknownType = errors.New("unknown type") +var ( + errUnknownType = errors.New("unknown type") -func mapUri(ptr interface{}, m map[string][]string) error { + // ErrConvertMapStringSlice can not covert to map[string][]string + ErrConvertMapStringSlice = errors.New("can not convert to map slices of strings") + + // ErrConvertToMapString can not convert to map[string]string + ErrConvertToMapString = errors.New("can not convert to map of strings") +) + +func mapURI(ptr any, m map[string][]string) error { return mapFormByTag(ptr, m, "uri") } -func mapForm(ptr interface{}, form map[string][]string) error { +func mapForm(ptr any, form map[string][]string) error { return mapFormByTag(ptr, form, "form") } +func MapFormWithTag(ptr any, form map[string][]string, tag string) error { + return mapFormByTag(ptr, form, tag) +} + var emptyField = reflect.StructField{} -func mapFormByTag(ptr interface{}, form map[string][]string, tag string) error { +func mapFormByTag(ptr any, form map[string][]string, tag string) error { // Check if ptr is a map ptrVal := reflect.ValueOf(ptr) - var pointed interface{} + var pointed any if ptrVal.Kind() == reflect.Ptr { ptrVal = ptrVal.Elem() pointed = ptrVal.Interface() @@ -49,7 +61,7 @@ func mapFormByTag(ptr interface{}, form map[string][]string, tag string) error { // setter tries to set value on a walking by fields of a struct type setter interface { - TrySet(value reflect.Value, field reflect.StructField, key string, opt setOptions) (isSetted bool, err error) + TrySet(value reflect.Value, field reflect.StructField, key string, opt setOptions) (isSet bool, err error) } type formSource map[string][]string @@ -57,11 +69,11 @@ type formSource map[string][]string var _ setter = formSource(nil) // TrySet tries to set a value by request's form source (like map[string][]string) -func (form formSource) TrySet(value reflect.Value, field reflect.StructField, tagValue string, opt setOptions) (isSetted bool, err error) { +func (form formSource) TrySet(value reflect.Value, field reflect.StructField, tagValue string, opt setOptions) (isSet bool, err error) { return setByForm(value, field, form, tagValue, opt) } -func mappingByPtr(ptr interface{}, setter setter, tag string) error { +func mappingByPtr(ptr any, setter setter, tag string) error { _, err := mapping(reflect.ValueOf(ptr), emptyField, setter, tag) return err } @@ -71,7 +83,7 @@ func mapping(value reflect.Value, field reflect.StructField, setter setter, tag return false, nil } - var vKind = value.Kind() + vKind := value.Kind() if vKind == reflect.Ptr { var isNew bool @@ -80,14 +92,14 @@ func mapping(value reflect.Value, field reflect.StructField, setter setter, tag isNew = true vPtr = reflect.New(value.Type().Elem()) } - isSetted, err := mapping(vPtr.Elem(), field, setter, tag) + isSet, err := mapping(vPtr.Elem(), field, setter, tag) if err != nil { return false, err } - if isNew && isSetted { + if isNew && isSet { value.Set(vPtr) } - return isSetted, nil + return isSet, nil } if vKind != reflect.Struct || !field.Anonymous { @@ -103,19 +115,19 @@ func mapping(value reflect.Value, field reflect.StructField, setter setter, tag if vKind == reflect.Struct { tValue := value.Type() - var isSetted bool + var isSet bool for i := 0; i < value.NumField(); i++ { sf := tValue.Field(i) if sf.PkgPath != "" && !sf.Anonymous { // unexported continue } - ok, err := mapping(value.Field(i), tValue.Field(i), setter, tag) + ok, err := mapping(value.Field(i), 
sf, setter, tag) if err != nil { return false, err } - isSetted = isSetted || ok + isSet = isSet || ok } - return isSetted, nil + return isSet, nil } return false, nil } @@ -152,7 +164,7 @@ func tryToSetValue(value reflect.Value, field reflect.StructField, setter setter return setter.TrySet(value, field, tagValue, setOpt) } -func setByForm(value reflect.Value, field reflect.StructField, form map[string][]string, tagValue string, opt setOptions) (isSetted bool, err error) { +func setByForm(value reflect.Value, field reflect.StructField, form map[string][]string, tagValue string, opt setOptions) (isSet bool, err error) { vs, ok := form[tagValue] if !ok && !opt.isDefaultExists { return false, nil @@ -198,7 +210,7 @@ func setWithProperType(val string, value reflect.Value, field reflect.StructFiel case reflect.Int64: switch value.Interface().(type) { case time.Duration: - return setTimeDuration(val, value, field) + return setTimeDuration(val, value) } return setIntField(val, 64, value) case reflect.Uint: @@ -298,7 +310,6 @@ func setTimeField(val string, structField reflect.StructField, value reflect.Val t := time.Unix(tv/int64(d), tv%int64(d)) value.Set(reflect.ValueOf(t)) return nil - } if val == "" { @@ -348,7 +359,7 @@ func setSlice(vals []string, value reflect.Value, field reflect.StructField) err return nil } -func setTimeDuration(val string, value reflect.Value, field reflect.StructField) error { +func setTimeDuration(val string, value reflect.Value) error { d, err := time.ParseDuration(val) if err != nil { return err @@ -365,13 +376,13 @@ func head(str, sep string) (head string, tail string) { return str[:idx], str[idx+len(sep):] } -func setFormMap(ptr interface{}, form map[string][]string) error { +func setFormMap(ptr any, form map[string][]string) error { el := reflect.TypeOf(ptr).Elem() if el.Kind() == reflect.Slice { ptrMap, ok := ptr.(map[string][]string) if !ok { - return errors.New("cannot convert to map slices of strings") + return ErrConvertMapStringSlice } for k, v := range form { ptrMap[k] = v @@ -382,7 +393,7 @@ func setFormMap(ptr interface{}, form map[string][]string) error { ptrMap, ok := ptr.(map[string]string) if !ok { - return errors.New("cannot convert to map of strings") + return ErrConvertToMapString } for k, v := range form { ptrMap[k] = v[len(v)-1] // pick last diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/header.go b/index/server/vendor/github.com/gin-gonic/gin/binding/header.go index 179ce4ea..03bc78da 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/header.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/header.go @@ -1,3 +1,7 @@ +// Copyright 2022 Gin Core Team. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. 
+ package binding import ( @@ -12,7 +16,7 @@ func (headerBinding) Name() string { return "header" } -func (headerBinding) Bind(req *http.Request, obj interface{}) error { +func (headerBinding) Bind(req *http.Request, obj any) error { if err := mapHeader(obj, req.Header); err != nil { return err @@ -21,7 +25,7 @@ func (headerBinding) Bind(req *http.Request, obj interface{}) error { return validate(obj) } -func mapHeader(ptr interface{}, h map[string][]string) error { +func mapHeader(ptr any, h map[string][]string) error { return mappingByPtr(ptr, headerSource(h), "header") } @@ -29,6 +33,6 @@ type headerSource map[string][]string var _ setter = headerSource(nil) -func (hs headerSource) TrySet(value reflect.Value, field reflect.StructField, tagValue string, opt setOptions) (isSetted bool, err error) { +func (hs headerSource) TrySet(value reflect.Value, field reflect.StructField, tagValue string, opt setOptions) (bool, error) { return setByForm(value, field, hs, textproto.CanonicalMIMEHeaderKey(tagValue), opt) } diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/json.go b/index/server/vendor/github.com/gin-gonic/gin/binding/json.go index d62e0705..36eb27a3 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/json.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/json.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -6,7 +6,7 @@ package binding import ( "bytes" - "fmt" + "errors" "io" "net/http" @@ -30,18 +30,18 @@ func (jsonBinding) Name() string { return "json" } -func (jsonBinding) Bind(req *http.Request, obj interface{}) error { +func (jsonBinding) Bind(req *http.Request, obj any) error { if req == nil || req.Body == nil { - return fmt.Errorf("invalid request") + return errors.New("invalid request") } return decodeJSON(req.Body, obj) } -func (jsonBinding) BindBody(body []byte, obj interface{}) error { +func (jsonBinding) BindBody(body []byte, obj any) error { return decodeJSON(bytes.NewReader(body), obj) } -func decodeJSON(r io.Reader, obj interface{}) error { +func decodeJSON(r io.Reader, obj any) error { decoder := json.NewDecoder(r) if EnableDecoderUseNumber { decoder.UseNumber() diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/msgpack.go b/index/server/vendor/github.com/gin-gonic/gin/binding/msgpack.go index 2a442996..d1f035e4 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/msgpack.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/msgpack.go @@ -1,4 +1,4 @@ -// Copyright 2017 Manu Martinez-Almeida. All rights reserved. +// Copyright 2017 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. 
@@ -21,15 +21,15 @@ func (msgpackBinding) Name() string { return "msgpack" } -func (msgpackBinding) Bind(req *http.Request, obj interface{}) error { +func (msgpackBinding) Bind(req *http.Request, obj any) error { return decodeMsgPack(req.Body, obj) } -func (msgpackBinding) BindBody(body []byte, obj interface{}) error { +func (msgpackBinding) BindBody(body []byte, obj any) error { return decodeMsgPack(bytes.NewReader(body), obj) } -func decodeMsgPack(r io.Reader, obj interface{}) error { +func decodeMsgPack(r io.Reader, obj any) error { cdc := new(codec.MsgpackHandle) if err := codec.NewDecoder(r, cdc).Decode(&obj); err != nil { return err diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/multipart_form_mapping.go b/index/server/vendor/github.com/gin-gonic/gin/binding/multipart_form_mapping.go index f85a1aa6..4ebe8326 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/multipart_form_mapping.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/multipart_form_mapping.go @@ -1,4 +1,4 @@ -// Copyright 2019 Gin Core Team. All rights reserved. +// Copyright 2019 Gin Core Team. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -15,8 +15,16 @@ type multipartRequest http.Request var _ setter = (*multipartRequest)(nil) +var ( + // ErrMultiFileHeader multipart.FileHeader invalid + ErrMultiFileHeader = errors.New("unsupported field type for multipart.FileHeader") + + // ErrMultiFileHeaderLenInvalid array for []*multipart.FileHeader len invalid + ErrMultiFileHeaderLenInvalid = errors.New("unsupported len of array for []*multipart.FileHeader") +) + // TrySet tries to set a value by the multipart request with the binding a form file -func (r *multipartRequest) TrySet(value reflect.Value, field reflect.StructField, key string, opt setOptions) (isSetted bool, err error) { +func (r *multipartRequest) TrySet(value reflect.Value, field reflect.StructField, key string, opt setOptions) (bool, error) { if files := r.MultipartForm.File[key]; len(files) != 0 { return setByMultipartFormFile(value, field, files) } @@ -24,7 +32,7 @@ func (r *multipartRequest) TrySet(value reflect.Value, field reflect.StructField return setByForm(value, field, r.MultipartForm.Value, key, opt) } -func setByMultipartFormFile(value reflect.Value, field reflect.StructField, files []*multipart.FileHeader) (isSetted bool, err error) { +func setByMultipartFormFile(value reflect.Value, field reflect.StructField, files []*multipart.FileHeader) (isSet bool, err error) { switch value.Kind() { case reflect.Ptr: switch value.Interface().(type) { @@ -40,26 +48,26 @@ func setByMultipartFormFile(value reflect.Value, field reflect.StructField, file } case reflect.Slice: slice := reflect.MakeSlice(value.Type(), len(files), len(files)) - isSetted, err = setArrayOfMultipartFormFiles(slice, field, files) - if err != nil || !isSetted { - return isSetted, err + isSet, err = setArrayOfMultipartFormFiles(slice, field, files) + if err != nil || !isSet { + return isSet, err } value.Set(slice) return true, nil case reflect.Array: return setArrayOfMultipartFormFiles(value, field, files) } - return false, errors.New("unsupported field type for multipart.FileHeader") + return false, ErrMultiFileHeader } -func setArrayOfMultipartFormFiles(value reflect.Value, field reflect.StructField, files []*multipart.FileHeader) (isSetted bool, err error) { +func setArrayOfMultipartFormFiles(value reflect.Value, field 
reflect.StructField, files []*multipart.FileHeader) (isSet bool, err error) { if value.Len() != len(files) { - return false, errors.New("unsupported len of array for []*multipart.FileHeader") + return false, ErrMultiFileHeaderLenInvalid } for i := range files { - setted, err := setByMultipartFormFile(value.Index(i), field, files[i:i+1]) - if err != nil || !setted { - return setted, err + set, err := setByMultipartFormFile(value.Index(i), field, files[i:i+1]) + if err != nil || !set { + return set, err } } return true, nil diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/protobuf.go b/index/server/vendor/github.com/gin-gonic/gin/binding/protobuf.go index f9ece928..44f2fdb9 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/protobuf.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/protobuf.go @@ -1,14 +1,15 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. package binding import ( + "errors" "io/ioutil" "net/http" - "github.com/golang/protobuf/proto" + "google.golang.org/protobuf/proto" ) type protobufBinding struct{} @@ -17,7 +18,7 @@ func (protobufBinding) Name() string { return "protobuf" } -func (b protobufBinding) Bind(req *http.Request, obj interface{}) error { +func (b protobufBinding) Bind(req *http.Request, obj any) error { buf, err := ioutil.ReadAll(req.Body) if err != nil { return err @@ -25,8 +26,12 @@ func (b protobufBinding) Bind(req *http.Request, obj interface{}) error { return b.BindBody(buf, obj) } -func (protobufBinding) BindBody(body []byte, obj interface{}) error { - if err := proto.Unmarshal(body, obj.(proto.Message)); err != nil { +func (protobufBinding) BindBody(body []byte, obj any) error { + msg, ok := obj.(proto.Message) + if !ok { + return errors.New("obj is not ProtoMessage") + } + if err := proto.Unmarshal(body, msg); err != nil { return err } // Here it's same to return validate(obj), but util now we can't add diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/query.go b/index/server/vendor/github.com/gin-gonic/gin/binding/query.go index 219743f2..c958b88b 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/query.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/query.go @@ -1,4 +1,4 @@ -// Copyright 2017 Manu Martinez-Almeida. All rights reserved. +// Copyright 2017 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -12,7 +12,7 @@ func (queryBinding) Name() string { return "query" } -func (queryBinding) Bind(req *http.Request, obj interface{}) error { +func (queryBinding) Bind(req *http.Request, obj any) error { values := req.URL.Query() if err := mapForm(obj, values); err != nil { return err diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/toml.go b/index/server/vendor/github.com/gin-gonic/gin/binding/toml.go new file mode 100644 index 00000000..a6b8a90a --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/toml.go @@ -0,0 +1,35 @@ +// Copyright 2022 Gin Core Team. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. 
+ +package binding + +import ( + "bytes" + "io" + "net/http" + + "github.com/pelletier/go-toml/v2" +) + +type tomlBinding struct{} + +func (tomlBinding) Name() string { + return "toml" +} + +func decodeToml(r io.Reader, obj any) error { + decoder := toml.NewDecoder(r) + if err := decoder.Decode(obj); err != nil { + return err + } + return decoder.Decode(obj) +} + +func (tomlBinding) Bind(req *http.Request, obj any) error { + return decodeToml(req.Body, obj) +} + +func (tomlBinding) BindBody(body []byte, obj any) error { + return decodeToml(bytes.NewReader(body), obj) +} diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/uri.go b/index/server/vendor/github.com/gin-gonic/gin/binding/uri.go index f91ec381..29151064 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/uri.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/uri.go @@ -1,4 +1,4 @@ -// Copyright 2018 Gin Core Team. All rights reserved. +// Copyright 2018 Gin Core Team. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -10,8 +10,8 @@ func (uriBinding) Name() string { return "uri" } -func (uriBinding) BindUri(m map[string][]string, obj interface{}) error { - if err := mapUri(obj, m); err != nil { +func (uriBinding) BindUri(m map[string][]string, obj any) error { + if err := mapURI(obj, m); err != nil { return err } return validate(obj) diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/xml.go b/index/server/vendor/github.com/gin-gonic/gin/binding/xml.go index 4e901149..a70f4ad3 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/xml.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/xml.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -17,14 +17,14 @@ func (xmlBinding) Name() string { return "xml" } -func (xmlBinding) Bind(req *http.Request, obj interface{}) error { +func (xmlBinding) Bind(req *http.Request, obj any) error { return decodeXML(req.Body, obj) } -func (xmlBinding) BindBody(body []byte, obj interface{}) error { +func (xmlBinding) BindBody(body []byte, obj any) error { return decodeXML(bytes.NewReader(body), obj) } -func decodeXML(r io.Reader, obj interface{}) error { +func decodeXML(r io.Reader, obj any) error { decoder := xml.NewDecoder(r) if err := decoder.Decode(obj); err != nil { return err diff --git a/index/server/vendor/github.com/gin-gonic/gin/binding/yaml.go b/index/server/vendor/github.com/gin-gonic/gin/binding/yaml.go index a2d36d6a..b0d36a35 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/binding/yaml.go +++ b/index/server/vendor/github.com/gin-gonic/gin/binding/yaml.go @@ -1,4 +1,4 @@ -// Copyright 2018 Gin Core Team. All rights reserved. +// Copyright 2018 Gin Core Team. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. 
@@ -18,15 +18,15 @@ func (yamlBinding) Name() string { return "yaml" } -func (yamlBinding) Bind(req *http.Request, obj interface{}) error { +func (yamlBinding) Bind(req *http.Request, obj any) error { return decodeYAML(req.Body, obj) } -func (yamlBinding) BindBody(body []byte, obj interface{}) error { +func (yamlBinding) BindBody(body []byte, obj any) error { return decodeYAML(bytes.NewReader(body), obj) } -func decodeYAML(r io.Reader, obj interface{}) error { +func decodeYAML(r io.Reader, obj any) error { decoder := yaml.NewDecoder(r) if err := decoder.Decode(obj); err != nil { return err diff --git a/index/server/vendor/github.com/gin-gonic/gin/context.go b/index/server/vendor/github.com/gin-gonic/gin/context.go index 220d1bc7..b1ad95e6 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/context.go +++ b/index/server/vendor/github.com/gin-gonic/gin/context.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -6,7 +6,6 @@ package gin import ( "errors" - "fmt" "io" "io/ioutil" "log" @@ -35,12 +34,17 @@ const ( MIMEPOSTForm = binding.MIMEPOSTForm MIMEMultipartPOSTForm = binding.MIMEMultipartPOSTForm MIMEYAML = binding.MIMEYAML + MIMETOML = binding.MIMETOML ) // BodyBytesKey indicates a default body bytes key. const BodyBytesKey = "_gin-gonic/gin/bodybyteskey" -const abortIndex int8 = math.MaxInt8 / 2 +// ContextKey is the key that a Context returns itself for. +const ContextKey = "_gin-gonic/gin/contextkey" + +// abortIndex represents a typical value used in abort functions. +const abortIndex int8 = math.MaxInt8 >> 1 // Context is the most important part of gin. It allows us to pass variables between middleware, // manage the flow, validate the JSON of a request and render a JSON response for example. @@ -58,11 +62,11 @@ type Context struct { params *Params skippedNodes *[]skippedNode - // This mutex protect Keys map + // This mutex protects Keys map. mu sync.RWMutex // Keys is a key/value pair exclusively for the context of each request. - Keys map[string]interface{} + Keys map[string]any // Errors is a list of errors attached to all the handlers/middlewares who used this context. Errors errorMsgs @@ -70,10 +74,10 @@ type Context struct { // Accepted defines a list of manually accepted formats for content negotiation. Accepted []string - // queryCache use url.ParseQuery cached the param query result from c.Request.URL.Query() + // queryCache caches the query result from c.Request.URL.Query(). queryCache url.Values - // formCache use url.ParseQuery cached PostForm contains the parsed form data from POST, PATCH, + // formCache caches c.Request.PostForm, which contains the parsed form data from POST, PATCH, // or PUT body parameters. 
formCache url.Values @@ -88,16 +92,17 @@ type Context struct { func (c *Context) reset() { c.Writer = &c.writermem - c.Params = c.Params[0:0] + c.Params = c.Params[:0] c.handlers = nil c.index = -1 c.fullPath = "" c.Keys = nil - c.Errors = c.Errors[0:0] + c.Errors = c.Errors[:0] c.Accepted = nil c.queryCache = nil c.formCache = nil + c.sameSite = 0 *c.params = (*c.params)[:0] *c.skippedNodes = (*c.skippedNodes)[:0] } @@ -115,7 +120,7 @@ func (c *Context) Copy() *Context { cp.Writer = &cp.writermem cp.index = abortIndex cp.handlers = nil - cp.Keys = map[string]interface{}{} + cp.Keys = map[string]any{} for k, v := range c.Keys { cp.Keys[k] = v } @@ -194,7 +199,7 @@ func (c *Context) AbortWithStatus(code int) { // AbortWithStatusJSON calls `Abort()` and then `JSON` internally. // This method stops the chain, writes the status code and return a JSON body. // It also sets the Content-Type as "application/json". -func (c *Context) AbortWithStatusJSON(code int, jsonObj interface{}) { +func (c *Context) AbortWithStatusJSON(code int, jsonObj any) { c.Abort() c.JSON(code, jsonObj) } @@ -221,7 +226,8 @@ func (c *Context) Error(err error) *Error { panic("err is nil") } - parsedError, ok := err.(*Error) + var parsedError *Error + ok := errors.As(err, &parsedError) if !ok { parsedError = &Error{ Err: err, @@ -239,10 +245,10 @@ func (c *Context) Error(err error) *Error { // Set is used to store a new key/value pair exclusively for this context. // It also lazy initializes c.Keys if it was not used previously. -func (c *Context) Set(key string, value interface{}) { +func (c *Context) Set(key string, value any) { c.mu.Lock() if c.Keys == nil { - c.Keys = make(map[string]interface{}) + c.Keys = make(map[string]any) } c.Keys[key] = value @@ -250,8 +256,8 @@ func (c *Context) Set(key string, value interface{}) { } // Get returns the value for the given key, ie: (value, true). -// If the value does not exists it returns (nil, false) -func (c *Context) Get(key string) (value interface{}, exists bool) { +// If the value does not exist it returns (nil, false) +func (c *Context) Get(key string) (value any, exists bool) { c.mu.RLock() value, exists = c.Keys[key] c.mu.RUnlock() @@ -259,7 +265,7 @@ func (c *Context) Get(key string) (value interface{}, exists bool) { } // MustGet returns the value for the given key if it exists, otherwise it panics. -func (c *Context) MustGet(key string) interface{} { +func (c *Context) MustGet(key string) any { if value, exists := c.Get(key); exists { return value } @@ -347,9 +353,9 @@ func (c *Context) GetStringSlice(key string) (ss []string) { } // GetStringMap returns the value associated with the key as a map of interfaces. -func (c *Context) GetStringMap(key string) (sm map[string]interface{}) { +func (c *Context) GetStringMap(key string) (sm map[string]any) { if val, ok := c.Get(key); ok && val != nil { - sm, _ = val.(map[string]interface{}) + sm, _ = val.(map[string]any) } return } @@ -384,6 +390,15 @@ func (c *Context) Param(key string) string { return c.Params.ByName(key) } +// AddParam adds param to context and +// replaces path param key with given value for e2e testing purposes +// Example Route: "/user/:id" +// AddParam("id", 1) +// Result: "/user/1" +func (c *Context) AddParam(key, value string) { + c.Params = append(c.Params, Param{Key: key, Value: value}) +} + // Query returns the keyed url query value if it exists, // otherwise it returns an empty string `("")`. 
// It is shortcut for `c.Request.URL.Query().Get(key)` @@ -392,9 +407,9 @@ func (c *Context) Param(key string) string { // c.Query("name") == "Manu" // c.Query("value") == "" // c.Query("wtf") == "" -func (c *Context) Query(key string) string { - value, _ := c.GetQuery(key) - return value +func (c *Context) Query(key string) (value string) { + value, _ = c.GetQuery(key) + return } // DefaultQuery returns the keyed url query value if it exists, @@ -428,9 +443,9 @@ func (c *Context) GetQuery(key string) (string, bool) { // QueryArray returns a slice of strings for a given query key. // The length of the slice depends on the number of params with the given key. -func (c *Context) QueryArray(key string) []string { - values, _ := c.GetQueryArray(key) - return values +func (c *Context) QueryArray(key string) (values []string) { + values, _ = c.GetQueryArray(key) + return } func (c *Context) initQueryCache() { @@ -445,18 +460,16 @@ func (c *Context) initQueryCache() { // GetQueryArray returns a slice of strings for a given query key, plus // a boolean value whether at least one value exists for the given key. -func (c *Context) GetQueryArray(key string) ([]string, bool) { +func (c *Context) GetQueryArray(key string) (values []string, ok bool) { c.initQueryCache() - if values, ok := c.queryCache[key]; ok && len(values) > 0 { - return values, true - } - return []string{}, false + values, ok = c.queryCache[key] + return } // QueryMap returns a map for a given query key. -func (c *Context) QueryMap(key string) map[string]string { - dicts, _ := c.GetQueryMap(key) - return dicts +func (c *Context) QueryMap(key string) (dicts map[string]string) { + dicts, _ = c.GetQueryMap(key) + return } // GetQueryMap returns a map for a given query key, plus a boolean value @@ -468,9 +481,9 @@ func (c *Context) GetQueryMap(key string) (map[string]string, bool) { // PostForm returns the specified key from a POST urlencoded form or multipart form // when it exists, otherwise it returns an empty string `("")`. -func (c *Context) PostForm(key string) string { - value, _ := c.GetPostForm(key) - return value +func (c *Context) PostForm(key string) (value string) { + value, _ = c.GetPostForm(key) + return } // DefaultPostForm returns the specified key from a POST urlencoded form or multipart form @@ -499,9 +512,9 @@ func (c *Context) GetPostForm(key string) (string, bool) { // PostFormArray returns a slice of strings for a given form key. // The length of the slice depends on the number of params with the given key. -func (c *Context) PostFormArray(key string) []string { - values, _ := c.GetPostFormArray(key) - return values +func (c *Context) PostFormArray(key string) (values []string) { + values, _ = c.GetPostFormArray(key) + return } func (c *Context) initFormCache() { @@ -509,7 +522,7 @@ func (c *Context) initFormCache() { c.formCache = make(url.Values) req := c.Request if err := req.ParseMultipartForm(c.engine.MaxMultipartMemory); err != nil { - if err != http.ErrNotMultipart { + if !errors.Is(err, http.ErrNotMultipart) { debugPrint("error on parse multipart form array: %v", err) } } @@ -519,18 +532,16 @@ func (c *Context) initFormCache() { // GetPostFormArray returns a slice of strings for a given form key, plus // a boolean value whether at least one value exists for the given key. 
-func (c *Context) GetPostFormArray(key string) ([]string, bool) { +func (c *Context) GetPostFormArray(key string) (values []string, ok bool) { c.initFormCache() - if values := c.formCache[key]; len(values) > 0 { - return values, true - } - return []string{}, false + values, ok = c.formCache[key] + return } // PostFormMap returns a map for a given form key. -func (c *Context) PostFormMap(key string) map[string]string { - dicts, _ := c.GetPostFormMap(key) - return dicts +func (c *Context) PostFormMap(key string) (dicts map[string]string) { + dicts, _ = c.GetPostFormMap(key) + return } // GetPostFormMap returns a map for a given form key, plus a boolean value @@ -594,47 +605,51 @@ func (c *Context) SaveUploadedFile(file *multipart.FileHeader, dst string) error return err } -// Bind checks the Content-Type to select a binding engine automatically, -// Depending the "Content-Type" header different bindings are used: +// Bind checks the Method and Content-Type to select a binding engine automatically, +// Depending on the "Content-Type" header different bindings are used, for example: // "application/json" --> JSON binding // "application/xml" --> XML binding -// otherwise --> returns an error. // It parses the request's body as JSON if Content-Type == "application/json" using JSON or XML as a JSON input. // It decodes the json payload into the struct specified as a pointer. // It writes a 400 error and sets Content-Type header "text/plain" in the response if input is not valid. -func (c *Context) Bind(obj interface{}) error { +func (c *Context) Bind(obj any) error { b := binding.Default(c.Request.Method, c.ContentType()) return c.MustBindWith(obj, b) } // BindJSON is a shortcut for c.MustBindWith(obj, binding.JSON). -func (c *Context) BindJSON(obj interface{}) error { +func (c *Context) BindJSON(obj any) error { return c.MustBindWith(obj, binding.JSON) } // BindXML is a shortcut for c.MustBindWith(obj, binding.BindXML). -func (c *Context) BindXML(obj interface{}) error { +func (c *Context) BindXML(obj any) error { return c.MustBindWith(obj, binding.XML) } // BindQuery is a shortcut for c.MustBindWith(obj, binding.Query). -func (c *Context) BindQuery(obj interface{}) error { +func (c *Context) BindQuery(obj any) error { return c.MustBindWith(obj, binding.Query) } // BindYAML is a shortcut for c.MustBindWith(obj, binding.YAML). -func (c *Context) BindYAML(obj interface{}) error { +func (c *Context) BindYAML(obj any) error { return c.MustBindWith(obj, binding.YAML) } +// BindTOML is a shortcut for c.MustBindWith(obj, binding.TOML). +func (c *Context) BindTOML(obj interface{}) error { + return c.MustBindWith(obj, binding.TOML) +} + // BindHeader is a shortcut for c.MustBindWith(obj, binding.Header). -func (c *Context) BindHeader(obj interface{}) error { +func (c *Context) BindHeader(obj any) error { return c.MustBindWith(obj, binding.Header) } // BindUri binds the passed struct pointer using binding.Uri. // It will abort the request with HTTP 400 if any error occurs. -func (c *Context) BindUri(obj interface{}) error { +func (c *Context) BindUri(obj any) error { if err := c.ShouldBindUri(obj); err != nil { c.AbortWithError(http.StatusBadRequest, err).SetType(ErrorTypeBind) // nolint: errcheck return err @@ -645,7 +660,7 @@ func (c *Context) BindUri(obj interface{}) error { // MustBindWith binds the passed struct pointer using the specified binding engine. // It will abort the request with HTTP 400 if any error occurs. // See the binding package. 
-func (c *Context) MustBindWith(obj interface{}, b binding.Binding) error { +func (c *Context) MustBindWith(obj any, b binding.Binding) error { if err := c.ShouldBindWith(obj, b); err != nil { c.AbortWithError(http.StatusBadRequest, err).SetType(ErrorTypeBind) // nolint: errcheck return err @@ -653,46 +668,50 @@ func (c *Context) MustBindWith(obj interface{}, b binding.Binding) error { return nil } -// ShouldBind checks the Content-Type to select a binding engine automatically, -// Depending the "Content-Type" header different bindings are used: +// ShouldBind checks the Method and Content-Type to select a binding engine automatically, +// Depending on the "Content-Type" header different bindings are used, for example: // "application/json" --> JSON binding // "application/xml" --> XML binding -// otherwise --> returns an error // It parses the request's body as JSON if Content-Type == "application/json" using JSON or XML as a JSON input. // It decodes the json payload into the struct specified as a pointer. -// Like c.Bind() but this method does not set the response status code to 400 and abort if the json is not valid. -func (c *Context) ShouldBind(obj interface{}) error { +// Like c.Bind() but this method does not set the response status code to 400 or abort if input is not valid. +func (c *Context) ShouldBind(obj any) error { b := binding.Default(c.Request.Method, c.ContentType()) return c.ShouldBindWith(obj, b) } // ShouldBindJSON is a shortcut for c.ShouldBindWith(obj, binding.JSON). -func (c *Context) ShouldBindJSON(obj interface{}) error { +func (c *Context) ShouldBindJSON(obj any) error { return c.ShouldBindWith(obj, binding.JSON) } // ShouldBindXML is a shortcut for c.ShouldBindWith(obj, binding.XML). -func (c *Context) ShouldBindXML(obj interface{}) error { +func (c *Context) ShouldBindXML(obj any) error { return c.ShouldBindWith(obj, binding.XML) } // ShouldBindQuery is a shortcut for c.ShouldBindWith(obj, binding.Query). -func (c *Context) ShouldBindQuery(obj interface{}) error { +func (c *Context) ShouldBindQuery(obj any) error { return c.ShouldBindWith(obj, binding.Query) } // ShouldBindYAML is a shortcut for c.ShouldBindWith(obj, binding.YAML). -func (c *Context) ShouldBindYAML(obj interface{}) error { +func (c *Context) ShouldBindYAML(obj any) error { return c.ShouldBindWith(obj, binding.YAML) } +// ShouldBindTOML is a shortcut for c.ShouldBindWith(obj, binding.TOML). +func (c *Context) ShouldBindTOML(obj interface{}) error { + return c.ShouldBindWith(obj, binding.TOML) +} + // ShouldBindHeader is a shortcut for c.ShouldBindWith(obj, binding.Header). -func (c *Context) ShouldBindHeader(obj interface{}) error { +func (c *Context) ShouldBindHeader(obj any) error { return c.ShouldBindWith(obj, binding.Header) } // ShouldBindUri binds the passed struct pointer using the specified binding engine. -func (c *Context) ShouldBindUri(obj interface{}) error { +func (c *Context) ShouldBindUri(obj any) error { m := make(map[string][]string) for _, v := range c.Params { m[v.Key] = []string{v.Value} @@ -702,7 +721,7 @@ func (c *Context) ShouldBindUri(obj interface{}) error { // ShouldBindWith binds the passed struct pointer using the specified binding engine. // See the binding package. 
-func (c *Context) ShouldBindWith(obj interface{}, b binding.Binding) error { +func (c *Context) ShouldBindWith(obj any, b binding.Binding) error { return b.Bind(c.Request, obj) } @@ -711,7 +730,7 @@ func (c *Context) ShouldBindWith(obj interface{}, b binding.Binding) error { // // NOTE: This method reads the body before binding. So you should use // ShouldBindWith for better performance if you need to call only once. -func (c *Context) ShouldBindBodyWith(obj interface{}, bb binding.BindingBody) (err error) { +func (c *Context) ShouldBindBodyWith(obj any, bb binding.BindingBody) (err error) { var body []byte if cb, ok := c.Get(BodyBytesKey); ok { if cbb, ok := cb.([]byte); ok { @@ -732,7 +751,7 @@ func (c *Context) ShouldBindBodyWith(obj interface{}, bb binding.BindingBody) (e // It called c.RemoteIP() under the hood, to check if the remote IP is a trusted proxy or not. // If it is it will then try to parse the headers defined in Engine.RemoteIPHeaders (defaulting to [X-Forwarded-For, X-Real-Ip]). // If the headers are not syntactically valid OR the remote IP does not correspond to a trusted proxy, -// the remote IP (coming form Request.RemoteAddr) is returned. +// the remote IP (coming from Request.RemoteAddr) is returned. func (c *Context) ClientIP() string { // Check if we're running on a trusted platform, continue running backwards if error if c.engine.TrustedPlatform != "" { @@ -750,10 +769,14 @@ func (c *Context) ClientIP() string { } } - remoteIP, trusted := c.RemoteIP() + // It also checks if the remoteIP is a trusted proxy or not. + // In order to perform this validation, it will see if the IP is contained within at least one of the CIDR blocks + // defined by Engine.SetTrustedProxies() + remoteIP := net.ParseIP(c.RemoteIP()) if remoteIP == nil { return "" } + trusted := c.engine.isTrustedProxy(remoteIP) if trusted && c.engine.ForwardedByClientIP && c.engine.RemoteIPHeaders != nil { for _, headerName := range c.engine.RemoteIPHeaders { @@ -766,53 +789,13 @@ func (c *Context) ClientIP() string { return remoteIP.String() } -func (e *Engine) isTrustedProxy(ip net.IP) bool { - if e.trustedCIDRs != nil { - for _, cidr := range e.trustedCIDRs { - if cidr.Contains(ip) { - return true - } - } - } - return false -} - // RemoteIP parses the IP from Request.RemoteAddr, normalizes and returns the IP (without the port). -// It also checks if the remoteIP is a trusted proxy or not. -// In order to perform this validation, it will see if the IP is contained within at least one of the CIDR blocks -// defined by Engine.SetTrustedProxies() -func (c *Context) RemoteIP() (net.IP, bool) { +func (c *Context) RemoteIP() string { ip, _, err := net.SplitHostPort(strings.TrimSpace(c.Request.RemoteAddr)) if err != nil { - return nil, false - } - remoteIP := net.ParseIP(ip) - if remoteIP == nil { - return nil, false - } - - return remoteIP, c.engine.isTrustedProxy(remoteIP) -} - -func (e *Engine) validateHeader(header string) (clientIP string, valid bool) { - if header == "" { - return "", false - } - items := strings.Split(header, ",") - for i := len(items) - 1; i >= 0; i-- { - ipStr := strings.TrimSpace(items[i]) - ip := net.ParseIP(ipStr) - if ip == nil { - return "", false - } - - // X-Forwarded-For is appended by proxy - // Check IPs in reverse order and stop when find untrusted proxy - if (i == 0) || (!e.isTrustedProxy(ip)) { - return ipStr, true - } + return "" } - return + return ip } // ContentType returns the Content-Type header of the request. 
@@ -856,7 +839,7 @@ func (c *Context) Status(code int) { c.Writer.WriteHeader(code) } -// Header is a intelligent shortcut for c.Writer.Header().Set(key, value). +// Header is an intelligent shortcut for c.Writer.Header().Set(key, value). // It writes a header in the response. // If value == "", this method removes the header `c.Writer.Header().Del(key)` func (c *Context) Header(key, value string) { @@ -872,7 +855,7 @@ func (c *Context) GetHeader(key string) string { return c.requestHeader(key) } -// GetRawData return stream data. +// GetRawData returns stream data. func (c *Context) GetRawData() ([]byte, error) { return ioutil.ReadAll(c.Request.Body) } @@ -932,30 +915,30 @@ func (c *Context) Render(code int, r render.Render) { // HTML renders the HTTP template specified by its file name. // It also updates the HTTP code and sets the Content-Type as "text/html". // See http://golang.org/doc/articles/wiki/ -func (c *Context) HTML(code int, name string, obj interface{}) { +func (c *Context) HTML(code int, name string, obj any) { instance := c.engine.HTMLRender.Instance(name, obj) c.Render(code, instance) } // IndentedJSON serializes the given struct as pretty JSON (indented + endlines) into the response body. // It also sets the Content-Type as "application/json". -// WARNING: we recommend to use this only for development purposes since printing pretty JSON is +// WARNING: we recommend using this only for development purposes since printing pretty JSON is // more CPU and bandwidth consuming. Use Context.JSON() instead. -func (c *Context) IndentedJSON(code int, obj interface{}) { +func (c *Context) IndentedJSON(code int, obj any) { c.Render(code, render.IndentedJSON{Data: obj}) } // SecureJSON serializes the given struct as Secure JSON into the response body. // Default prepends "while(1)," to response body if the given struct is array values. // It also sets the Content-Type as "application/json". -func (c *Context) SecureJSON(code int, obj interface{}) { +func (c *Context) SecureJSON(code int, obj any) { c.Render(code, render.SecureJSON{Prefix: c.engine.secureJSONPrefix, Data: obj}) } // JSONP serializes the given struct as JSON into the response body. // It adds padding to response body to request data from a server residing in a different domain than the client. // It also sets the Content-Type as "application/javascript". -func (c *Context) JSONP(code int, obj interface{}) { +func (c *Context) JSONP(code int, obj any) { callback := c.DefaultQuery("callback", "") if callback == "" { c.Render(code, render.JSON{Data: obj}) @@ -966,44 +949,49 @@ func (c *Context) JSONP(code int, obj interface{}) { // JSON serializes the given struct as JSON into the response body. // It also sets the Content-Type as "application/json". -func (c *Context) JSON(code int, obj interface{}) { +func (c *Context) JSON(code int, obj any) { c.Render(code, render.JSON{Data: obj}) } // AsciiJSON serializes the given struct as JSON into the response body with unicode to ASCII string. // It also sets the Content-Type as "application/json". -func (c *Context) AsciiJSON(code int, obj interface{}) { +func (c *Context) AsciiJSON(code int, obj any) { c.Render(code, render.AsciiJSON{Data: obj}) } // PureJSON serializes the given struct as JSON into the response body. // PureJSON, unlike JSON, does not replace special html characters with their unicode entities. 
-func (c *Context) PureJSON(code int, obj interface{}) { +func (c *Context) PureJSON(code int, obj any) { c.Render(code, render.PureJSON{Data: obj}) } // XML serializes the given struct as XML into the response body. // It also sets the Content-Type as "application/xml". -func (c *Context) XML(code int, obj interface{}) { +func (c *Context) XML(code int, obj any) { c.Render(code, render.XML{Data: obj}) } // YAML serializes the given struct as YAML into the response body. -func (c *Context) YAML(code int, obj interface{}) { +func (c *Context) YAML(code int, obj any) { c.Render(code, render.YAML{Data: obj}) } +// TOML serializes the given struct as TOML into the response body. +func (c *Context) TOML(code int, obj interface{}) { + c.Render(code, render.TOML{Data: obj}) +} + // ProtoBuf serializes the given struct as ProtoBuf into the response body. -func (c *Context) ProtoBuf(code int, obj interface{}) { +func (c *Context) ProtoBuf(code int, obj any) { c.Render(code, render.ProtoBuf{Data: obj}) } // String writes the given string into the response body. -func (c *Context) String(code int, format string, values ...interface{}) { +func (c *Context) String(code int, format string, values ...any) { c.Render(code, render.String{Format: format, Data: values}) } -// Redirect returns a HTTP redirect to the specific location. +// Redirect returns an HTTP redirect to the specific location. func (c *Context) Redirect(code int, location string) { c.Render(-1, render.Redirect{ Code: code, @@ -1049,12 +1037,16 @@ func (c *Context) FileFromFS(filepath string, fs http.FileSystem) { // FileAttachment writes the specified file into the body stream in an efficient way // On the client side, the file will typically be downloaded with the given filename func (c *Context) FileAttachment(filepath, filename string) { - c.Writer.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename)) + if isASCII(filename) { + c.Writer.Header().Set("Content-Disposition", `attachment; filename="`+filename+`"`) + } else { + c.Writer.Header().Set("Content-Disposition", `attachment; filename*=UTF-8''`+url.QueryEscape(filename)) + } http.ServeFile(c.Writer, c.Request, filepath) } // SSEvent writes a Server-Sent Event into the body stream. -func (c *Context) SSEvent(name string, message interface{}) { +func (c *Context) SSEvent(name string, message any) { c.Render(-1, sse.Event{ Event: name, Data: message, @@ -1088,14 +1080,15 @@ func (c *Context) Stream(step func(w io.Writer) bool) bool { type Negotiate struct { Offered []string HTMLName string - HTMLData interface{} - JSONData interface{} - XMLData interface{} - YAMLData interface{} - Data interface{} + HTMLData any + JSONData any + XMLData any + YAMLData any + Data any + TOMLData any } -// Negotiate calls different Render according acceptable Accept format. +// Negotiate calls different Render according to acceptable Accept format. func (c *Context) Negotiate(code int, config Negotiate) { switch c.NegotiateFormat(config.Offered...) 
{ case binding.MIMEJSON: @@ -1114,6 +1107,10 @@ func (c *Context) Negotiate(code int, config Negotiate) { data := chooseData(config.YAMLData, config.Data) c.YAML(code, data) + case binding.MIMETOML: + data := chooseData(config.TOMLData, config.Data) + c.TOML(code, data) + default: c.AbortWithError(http.StatusNotAcceptable, errors.New("the accepted formats are not offered by the server")) // nolint: errcheck } @@ -1159,34 +1156,47 @@ func (c *Context) SetAccepted(formats ...string) { /***** GOLANG.ORG/X/NET/CONTEXT *****/ /************************************/ -// Deadline always returns that there is no deadline (ok==false), -// maybe you want to use Request.Context().Deadline() instead. +// Deadline returns that there is no deadline (ok==false) when c.Request has no Context. func (c *Context) Deadline() (deadline time.Time, ok bool) { - return + if !c.engine.ContextWithFallback || c.Request == nil || c.Request.Context() == nil { + return + } + return c.Request.Context().Deadline() } -// Done always returns nil (chan which will wait forever), -// if you want to abort your work when the connection was closed -// you should use Request.Context().Done() instead. +// Done returns nil (chan which will wait forever) when c.Request has no Context. func (c *Context) Done() <-chan struct{} { - return nil + if !c.engine.ContextWithFallback || c.Request == nil || c.Request.Context() == nil { + return nil + } + return c.Request.Context().Done() } -// Err always returns nil, maybe you want to use Request.Context().Err() instead. +// Err returns nil when c.Request has no Context. func (c *Context) Err() error { - return nil + if !c.engine.ContextWithFallback || c.Request == nil || c.Request.Context() == nil { + return nil + } + return c.Request.Context().Err() } // Value returns the value associated with this context for key, or nil // if no value is associated with key. Successive calls to Value with // the same key returns the same result. -func (c *Context) Value(key interface{}) interface{} { +func (c *Context) Value(key any) any { if key == 0 { return c.Request } + if key == ContextKey { + return c + } if keyAsString, ok := key.(string); ok { - val, _ := c.Get(keyAsString) - return val + if val, exists := c.Get(keyAsString); exists { + return val + } } - return nil + if !c.engine.ContextWithFallback || c.Request == nil || c.Request.Context() == nil { + return nil + } + return c.Request.Context().Value(key) } diff --git a/index/server/vendor/github.com/gin-gonic/gin/context_appengine.go b/index/server/vendor/github.com/gin-gonic/gin/context_appengine.go index 8bf93896..931313f6 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/context_appengine.go +++ b/index/server/vendor/github.com/gin-gonic/gin/context_appengine.go @@ -1,4 +1,4 @@ -// Copyright 2017 Manu Martinez-Almeida. All rights reserved. +// Copyright 2017 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. diff --git a/index/server/vendor/github.com/gin-gonic/gin/debug.go b/index/server/vendor/github.com/gin-gonic/gin/debug.go index 9bacc685..25fd7c87 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/debug.go +++ b/index/server/vendor/github.com/gin-gonic/gin/debug.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. 
// Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -12,7 +12,7 @@ import ( "strings" ) -const ginSupportMinGoVer = 13 +const ginSupportMinGoVer = 14 // IsDebugging returns true if the framework is running in debug mode. // Use SetMode(gin.ReleaseMode) to disable debug mode. @@ -47,7 +47,7 @@ func debugPrintLoadTemplate(tmpl *template.Template) { } } -func debugPrint(format string, values ...interface{}) { +func debugPrint(format string, values ...any) { if IsDebugging() { if !strings.HasSuffix(format, "\n") { format += "\n" @@ -67,7 +67,7 @@ func getMinVer(v string) (uint64, error) { func debugPrintWARNINGDefault() { if v, e := getMinVer(runtime.Version()); e == nil && v <= ginSupportMinGoVer { - debugPrint(`[WARNING] Now Gin requires Go 1.13+. + debugPrint(`[WARNING] Now Gin requires Go 1.14+. `) } @@ -95,9 +95,7 @@ at initialization. ie. before any route is registered or the router is listening } func debugPrintError(err error) { - if err != nil { - if IsDebugging() { - fmt.Fprintf(DefaultErrorWriter, "[GIN-debug] [ERROR] %v\n", err) - } + if err != nil && IsDebugging() { + fmt.Fprintf(DefaultErrorWriter, "[GIN-debug] [ERROR] %v\n", err) } } diff --git a/index/server/vendor/github.com/gin-gonic/gin/deprecated.go b/index/server/vendor/github.com/gin-gonic/gin/deprecated.go index ab447429..fdad8554 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/deprecated.go +++ b/index/server/vendor/github.com/gin-gonic/gin/deprecated.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -12,7 +12,7 @@ import ( // BindWith binds the passed struct pointer using the specified binding engine. // See the binding package. -func (c *Context) BindWith(obj interface{}, b binding.Binding) error { +func (c *Context) BindWith(obj any, b binding.Binding) error { log.Println(`BindWith(\"interface{}, binding.Binding\") error is going to be deprecated, please check issue #662 and either use MustBindWith() if you want HTTP 400 to be automatically returned if any error occur, or use diff --git a/index/server/vendor/github.com/gin-gonic/gin/errors.go b/index/server/vendor/github.com/gin-gonic/gin/errors.go index 0f276c13..2853ce8e 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/errors.go +++ b/index/server/vendor/github.com/gin-gonic/gin/errors.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -34,7 +34,7 @@ const ( type Error struct { Err error Type ErrorType - Meta interface{} + Meta any } type errorMsgs []*Error @@ -48,13 +48,13 @@ func (msg *Error) SetType(flags ErrorType) *Error { } // SetMeta sets the error's meta data. -func (msg *Error) SetMeta(data interface{}) *Error { +func (msg *Error) SetMeta(data any) *Error { msg.Meta = data return msg } // JSON creates a properly formatted JSON -func (msg *Error) JSON() interface{} { +func (msg *Error) JSON() any { jsonData := H{} if msg.Meta != nil { value := reflect.ValueOf(msg.Meta) @@ -122,7 +122,7 @@ func (a errorMsgs) Last() *Error { return nil } -// Errors returns an array will all the error messages. 
+// Errors returns an array with all the error messages. // Example: // c.Error(errors.New("first")) // c.Error(errors.New("second")) @@ -139,14 +139,14 @@ func (a errorMsgs) Errors() []string { return errorStrings } -func (a errorMsgs) JSON() interface{} { +func (a errorMsgs) JSON() any { switch length := len(a); length { case 0: return nil case 1: return a.Last().JSON() default: - jsonData := make([]interface{}, length) + jsonData := make([]any, length) for i, err := range a { jsonData[i] = err.JSON() } diff --git a/index/server/vendor/github.com/gin-gonic/gin/fs.go b/index/server/vendor/github.com/gin-gonic/gin/fs.go index 007d9b75..64274735 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/fs.go +++ b/index/server/vendor/github.com/gin-gonic/gin/fs.go @@ -1,4 +1,4 @@ -// Copyright 2017 Manu Martinez-Almeida. All rights reserved. +// Copyright 2017 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -17,7 +17,7 @@ type neuteredReaddirFile struct { http.File } -// Dir returns a http.Filesystem that can be used by http.FileServer(). It is used internally +// Dir returns a http.FileSystem that can be used by http.FileServer(). It is used internally // in router.Static(). // if listDirectory == true, then it works the same as http.Dir() otherwise it returns // a filesystem that prevents http.FileServer() to list the directory files. diff --git a/index/server/vendor/github.com/gin-gonic/gin/gin.go b/index/server/vendor/github.com/gin-gonic/gin/gin.go index 58e76f41..f9324299 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/gin.go +++ b/index/server/vendor/github.com/gin-gonic/gin/gin.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -11,12 +11,13 @@ import ( "net/http" "os" "path" - "reflect" "strings" "sync" "github.com/gin-gonic/gin/internal/bytesconv" "github.com/gin-gonic/gin/render" + "golang.org/x/net/http2" + "golang.org/x/net/http2/h2c" ) const defaultMultipartMemory = 32 << 20 // 32 MB @@ -28,15 +29,24 @@ var ( var defaultPlatform string -var defaultTrustedCIDRs = []*net.IPNet{{IP: net.IP{0x0, 0x0, 0x0, 0x0}, Mask: net.IPMask{0x0, 0x0, 0x0, 0x0}}} // 0.0.0.0/0 +var defaultTrustedCIDRs = []*net.IPNet{ + { // 0.0.0.0/0 (IPv4) + IP: net.IP{0x0, 0x0, 0x0, 0x0}, + Mask: net.IPMask{0x0, 0x0, 0x0, 0x0}, + }, + { // ::/0 (IPv6) + IP: net.IP{0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0}, + Mask: net.IPMask{0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0}, + }, +} // HandlerFunc defines the handler used by gin middleware as return value. type HandlerFunc func(*Context) -// HandlersChain defines a HandlerFunc array. +// HandlersChain defines a HandlerFunc slice. type HandlersChain []HandlerFunc -// Last returns the last handler in the chain. ie. the last handler is the main one. +// Last returns the last handler in the chain. i.e. the last handler is the main one. func (c HandlersChain) Last() HandlerFunc { if length := len(c); length > 0 { return c[length-1] @@ -52,15 +62,15 @@ type RouteInfo struct { HandlerFunc HandlerFunc } -// RoutesInfo defines a RouteInfo array. +// RoutesInfo defines a RouteInfo slice. 
type RoutesInfo []RouteInfo // Trusted platforms const ( - // When running on Google App Engine. Trust X-Appengine-Remote-Addr + // PlatformGoogleAppEngine when running on Google App Engine. Trust X-Appengine-Remote-Addr // for determining the client's IP PlatformGoogleAppEngine = "X-Appengine-Remote-Addr" - // When using Cloudflare's CDN. Trust CF-Connecting-IP for determining + // PlatformCloudflare when using Cloudflare's CDN. Trust CF-Connecting-IP for determining // the client's IP PlatformCloudflare = "CF-Connecting-IP" ) @@ -70,14 +80,14 @@ const ( type Engine struct { RouterGroup - // Enables automatic redirection if the current route can't be matched but a + // RedirectTrailingSlash enables automatic redirection if the current route can't be matched but a // handler for the path with (without) the trailing slash exists. // For example if /foo/ is requested but a route only exists for /foo, the // client is redirected to /foo with http status code 301 for GET requests // and 307 for all other request methods. RedirectTrailingSlash bool - // If enabled, the router tries to fix the current request path, if no + // RedirectFixedPath if enabled, the router tries to fix the current request path, if no // handle is registered for it. // First superfluous path elements like ../ or // are removed. // Afterwards the router does a case-insensitive lookup of the cleaned path. @@ -88,7 +98,7 @@ type Engine struct { // RedirectTrailingSlash is independent of this option. RedirectFixedPath bool - // If enabled, the router checks if another method is allowed for the + // HandleMethodNotAllowed if enabled, the router checks if another method is allowed for the // current route, if the current request can not be routed. // If this is the case, the request is answered with 'Method Not Allowed' // and HTTP status code 405. @@ -96,21 +106,22 @@ type Engine struct { // handler. HandleMethodNotAllowed bool - // If enabled, client IP will be parsed from the request's headers that + // ForwardedByClientIP if enabled, client IP will be parsed from the request's headers that // match those stored at `(*gin.Engine).RemoteIPHeaders`. If no IP was // fetched, it falls back to the IP obtained from // `(*gin.Context).Request.RemoteAddr`. ForwardedByClientIP bool - // DEPRECATED: USE `TrustedPlatform` WITH VALUE `gin.GoogleAppEngine` INSTEAD + // AppEngine was deprecated. + // Deprecated: USE `TrustedPlatform` WITH VALUE `gin.PlatformGoogleAppEngine` INSTEAD // #726 #755 If enabled, it will trust some headers starting with // 'X-AppEngine...' for better integration with that PaaS. AppEngine bool - // If enabled, the url.RawPath will be used to find parameters. + // UseRawPath if enabled, the url.RawPath will be used to find parameters. UseRawPath bool - // If true, the path value will be unescaped. + // UnescapePathValues if true, the path value will be unescaped. // If UseRawPath is false (by default), the UnescapePathValues effectively is true, // as url.Path gonna be used, which is already unescaped. UnescapePathValues bool @@ -119,20 +130,26 @@ type Engine struct { // See the PR #1817 and issue #1644 RemoveExtraSlash bool - // List of headers used to obtain the client IP when + // RemoteIPHeaders list of headers used to obtain the client IP when // `(*gin.Engine).ForwardedByClientIP` is `true` and // `(*gin.Context).Request.RemoteAddr` is matched by at least one of the // network origins of list defined by `(*gin.Engine).SetTrustedProxies()`. 
RemoteIPHeaders []string - // If set to a constant of value gin.Platform*, trusts the headers set by + // TrustedPlatform if set to a constant of value gin.Platform*, trusts the headers set by // that platform, for example to determine the client IP TrustedPlatform string - // Value of 'maxMemory' param that is given to http.Request's ParseMultipartForm + // MaxMultipartMemory value of 'maxMemory' param that is given to http.Request's ParseMultipartForm // method call. MaxMultipartMemory int64 + // UseH2C enable h2c support. + UseH2C bool + + // ContextWithFallback enable fallback Context.Deadline(), Context.Done(), Context.Err() and Context.Value() when Context.Request.Context() is not nil. + ContextWithFallback bool + delims render.Delims secureJSONPrefix string HTMLRender render.HTMLRender @@ -152,7 +169,7 @@ type Engine struct { var _ IRouter = &Engine{} // New returns a new blank Engine instance without any middleware attached. -// By default the configuration is: +// By default, the configuration is: // - RedirectTrailingSlash: true // - RedirectFixedPath: false // - HandleMethodNotAllowed: false @@ -181,11 +198,11 @@ func New() *Engine { trees: make(methodTrees, 0, 9), delims: render.Delims{Left: "{{", Right: "}}"}, secureJSONPrefix: "while(1);", - trustedProxies: []string{"0.0.0.0/0"}, + trustedProxies: []string{"0.0.0.0/0", "::/0"}, trustedCIDRs: defaultTrustedCIDRs, } engine.RouterGroup.engine = engine - engine.pool.New = func() interface{} { + engine.pool.New = func() any { return engine.allocateContext() } return engine @@ -199,13 +216,22 @@ func Default() *Engine { return engine } +func (engine *Engine) Handler() http.Handler { + if !engine.UseH2C { + return engine + } + + h2s := &http2.Server{} + return h2c.NewHandler(engine, h2s) +} + func (engine *Engine) allocateContext() *Context { v := make(Params, 0, engine.maxParams) skippedNodes := make([]skippedNode, 0, engine.maxSections) return &Context{engine: engine, params: &v, skippedNodes: &skippedNodes} } -// Delims sets template left and right delims and returns a Engine instance. +// Delims sets template left and right delims and returns an Engine instance. func (engine *Engine) Delims(left, right string) *Engine { engine.delims = render.Delims{Left: left, Right: right} return engine @@ -259,7 +285,7 @@ func (engine *Engine) SetFuncMap(funcMap template.FuncMap) { engine.FuncMap = funcMap } -// NoRoute adds handlers for NoRoute. It return a 404 code by default. +// NoRoute adds handlers for NoRoute. It returns a 404 code by default. func (engine *Engine) NoRoute(handlers ...HandlerFunc) { engine.noRoute = handlers engine.rebuild404Handlers() @@ -271,7 +297,7 @@ func (engine *Engine) NoMethod(handlers ...HandlerFunc) { engine.rebuild405Handlers() } -// Use attaches a global middleware to the router. ie. the middleware attached though Use() will be +// Use attaches a global middleware to the router. i.e. the middleware attached through Use() will be // included in the handlers chain for every single request. Even 404, 405, static files... // For example, this is the right place for a logger or error management middleware. 
func (engine *Engine) Use(middleware ...HandlerFunc) IRoutes { @@ -353,7 +379,7 @@ func (engine *Engine) Run(addr ...string) (err error) { address := resolveAddress(addr) debugPrint("Listening and serving HTTP on %s\n", address) - err = http.ListenAndServe(address, engine) + err = http.ListenAndServe(address, engine.Handler()) return } @@ -399,9 +425,9 @@ func (engine *Engine) SetTrustedProxies(trustedProxies []string) error { return engine.parseTrustedProxies() } -// isUnsafeTrustedProxies compares Engine.trustedCIDRs and defaultTrustedCIDRs, it's not safe if equal (returns true) +// isUnsafeTrustedProxies checks if Engine.trustedCIDRs contains all IPs, it's not safe if it has (returns true) func (engine *Engine) isUnsafeTrustedProxies() bool { - return reflect.DeepEqual(engine.trustedCIDRs, defaultTrustedCIDRs) + return engine.isTrustedProxy(net.ParseIP("0.0.0.0")) || engine.isTrustedProxy(net.ParseIP("::")) } // parseTrustedProxies parse Engine.trustedProxies to Engine.trustedCIDRs @@ -411,6 +437,41 @@ func (engine *Engine) parseTrustedProxies() error { return err } +// isTrustedProxy will check whether the IP address is included in the trusted list according to Engine.trustedCIDRs +func (engine *Engine) isTrustedProxy(ip net.IP) bool { + if engine.trustedCIDRs == nil { + return false + } + for _, cidr := range engine.trustedCIDRs { + if cidr.Contains(ip) { + return true + } + } + return false +} + +// validateHeader will parse X-Forwarded-For header and return the trusted client IP address +func (engine *Engine) validateHeader(header string) (clientIP string, valid bool) { + if header == "" { + return "", false + } + items := strings.Split(header, ",") + for i := len(items) - 1; i >= 0; i-- { + ipStr := strings.TrimSpace(items[i]) + ip := net.ParseIP(ipStr) + if ip == nil { + break + } + + // X-Forwarded-For is appended by proxy + // Check IPs in reverse order and stop when find untrusted proxy + if (i == 0) || (!engine.isTrustedProxy(ip)) { + return ipStr, true + } + } + return "", false +} + // parseIP parse a string representation of an IP and returns a net.IP with the // minimum byte representation or nil if input is invalid. func parseIP(ip string) net.IP { @@ -437,12 +498,12 @@ func (engine *Engine) RunTLS(addr, certFile, keyFile string) (err error) { "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") } - err = http.ListenAndServeTLS(addr, certFile, keyFile, engine) + err = http.ListenAndServeTLS(addr, certFile, keyFile, engine.Handler()) return } // RunUnix attaches the router to a http.Server and starts listening and serving HTTP requests -// through the specified unix socket (ie. a file). +// through the specified unix socket (i.e. a file). // Note: this method will block the calling goroutine indefinitely unless an error happens. 
func (engine *Engine) RunUnix(file string) (err error) { debugPrint("Listening and serving HTTP on unix:/%s", file) @@ -460,7 +521,7 @@ func (engine *Engine) RunUnix(file string) (err error) { defer listener.Close() defer os.Remove(file) - err = http.Serve(listener, engine) + err = http.Serve(listener, engine.Handler()) return } @@ -497,7 +558,7 @@ func (engine *Engine) RunListener(listener net.Listener) (err error) { "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") } - err = http.Serve(listener, engine) + err = http.Serve(listener, engine.Handler()) return } @@ -513,9 +574,9 @@ func (engine *Engine) ServeHTTP(w http.ResponseWriter, req *http.Request) { engine.pool.Put(c) } -// HandleContext re-enter a context that has been rewritten. +// HandleContext re-enters a context that has been rewritten. // This can be done by setting c.Request.URL.Path to your new target. -// Disclaimer: You can loop yourself to death with this, use wisely. +// Disclaimer: You can loop yourself to deal with this, use wisely. func (engine *Engine) HandleContext(c *Context) { oldIndexValue := c.index c.reset() @@ -556,7 +617,7 @@ func (engine *Engine) handleHTTPRequest(c *Context) { c.writermem.WriteHeaderNow() return } - if httpMethod != "CONNECT" && rPath != "/" { + if httpMethod != http.MethodConnect && rPath != "/" { if value.tsr && engine.RedirectTrailingSlash { redirectTrailingSlash(c) return diff --git a/index/server/vendor/github.com/gin-gonic/gin/internal/json/go_json.go b/index/server/vendor/github.com/gin-gonic/gin/internal/json/go_json.go new file mode 100644 index 00000000..23f71726 --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/internal/json/go_json.go @@ -0,0 +1,23 @@ +// Copyright 2017 Bo-Yi Wu. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. + +//go:build go_json +// +build go_json + +package json + +import json "github.com/goccy/go-json" + +var ( + // Marshal is exported by gin/json package. + Marshal = json.Marshal + // Unmarshal is exported by gin/json package. + Unmarshal = json.Unmarshal + // MarshalIndent is exported by gin/json package. + MarshalIndent = json.MarshalIndent + // NewDecoder is exported by gin/json package. + NewDecoder = json.NewDecoder + // NewEncoder is exported by gin/json package. + NewEncoder = json.NewEncoder +) diff --git a/index/server/vendor/github.com/gin-gonic/gin/internal/json/json.go b/index/server/vendor/github.com/gin-gonic/gin/internal/json/json.go index 172aeb24..a26d7db2 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/internal/json/json.go +++ b/index/server/vendor/github.com/gin-gonic/gin/internal/json/json.go @@ -1,9 +1,9 @@ -// Copyright 2017 Bo-Yi Wu. All rights reserved. +// Copyright 2017 Bo-Yi Wu. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. 
-//go:build !jsoniter -// +build !jsoniter +//go:build !jsoniter && !go_json +// +build !jsoniter,!go_json package json diff --git a/index/server/vendor/github.com/gin-gonic/gin/internal/json/jsoniter.go b/index/server/vendor/github.com/gin-gonic/gin/internal/json/jsoniter.go index 232f8dca..853b1a90 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/internal/json/jsoniter.go +++ b/index/server/vendor/github.com/gin-gonic/gin/internal/json/jsoniter.go @@ -1,4 +1,4 @@ -// Copyright 2017 Bo-Yi Wu. All rights reserved. +// Copyright 2017 Bo-Yi Wu. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. diff --git a/index/server/vendor/github.com/gin-gonic/gin/logger.go b/index/server/vendor/github.com/gin-gonic/gin/logger.go index d361b74d..cd1e7fa6 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/logger.go +++ b/index/server/vendor/github.com/gin-gonic/gin/logger.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -44,7 +44,7 @@ type LoggerConfig struct { // Optional. Default value is gin.DefaultWriter. Output io.Writer - // SkipPaths is a url path array which logs are not written. + // SkipPaths is an url path array which logs are not written. // Optional. SkipPaths []string } @@ -70,12 +70,12 @@ type LogFormatterParams struct { Path string // ErrorMessage is set if error has occurred in processing the request. ErrorMessage string - // isTerm shows whether does gin's output descriptor refers to a terminal. + // isTerm shows whether gin's output descriptor refers to a terminal. isTerm bool // BodySize is the size of the Response Body BodySize int // Keys are the keys set on the request's context. - Keys map[string]interface{} + Keys map[string]any } // StatusCodeColor is the ANSI color for appropriately logging http status code to a terminal. @@ -138,8 +138,7 @@ var defaultLogFormatter = func(param LogFormatterParams) string { } if param.Latency > time.Minute { - // Truncate in a golang < 1.8 safe way - param.Latency = param.Latency - param.Latency%time.Second + param.Latency = param.Latency.Truncate(time.Second) } return fmt.Sprintf("[GIN] %v |%s %3d %s| %13v | %15s |%s %-7s %s %#v\n%s", param.TimeStamp.Format("2006/01/02 - 15:04:05"), @@ -162,12 +161,12 @@ func ForceConsoleColor() { consoleColorMode = forceColor } -// ErrorLogger returns a handlerfunc for any error type. +// ErrorLogger returns a HandlerFunc for any error type. func ErrorLogger() HandlerFunc { return ErrorLoggerT(ErrorTypeAny) } -// ErrorLoggerT returns a handlerfunc for a given error type. +// ErrorLoggerT returns a HandlerFunc for a given error type. func ErrorLoggerT(typ ErrorType) HandlerFunc { return func(c *Context) { c.Next() @@ -179,7 +178,7 @@ func ErrorLoggerT(typ ErrorType) HandlerFunc { } // Logger instances a Logger middleware that will write the logs to gin.DefaultWriter. -// By default gin.DefaultWriter = os.Stdout. +// By default, gin.DefaultWriter = os.Stdout. 
func Logger() HandlerFunc { return LoggerWithConfig(LoggerConfig{}) } diff --git a/index/server/vendor/github.com/gin-gonic/gin/mode.go b/index/server/vendor/github.com/gin-gonic/gin/mode.go index c8813aff..545fdaaf 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/mode.go +++ b/index/server/vendor/github.com/gin-gonic/gin/mode.go @@ -1,10 +1,11 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. package gin import ( + "flag" "io" "os" @@ -41,8 +42,10 @@ var DefaultWriter io.Writer = os.Stdout // DefaultErrorWriter is the default io.Writer used by Gin to debug errors var DefaultErrorWriter io.Writer = os.Stderr -var ginMode = debugCode -var modeName = DebugMode +var ( + ginMode = debugCode + modeName = DebugMode +) func init() { mode := os.Getenv(EnvGinMode) @@ -52,7 +55,11 @@ func init() { // SetMode sets gin mode according to input string. func SetMode(value string) { if value == "" { - value = DebugMode + if flag.Lookup("test.v") != nil { + value = TestMode + } else { + value = DebugMode + } } switch value { @@ -86,7 +93,7 @@ func EnableJsonDecoderDisallowUnknownFields() { binding.EnableDecoderDisallowUnknownFields = true } -// Mode returns currently gin mode. +// Mode returns current gin mode. func Mode() string { return modeName } diff --git a/index/server/vendor/github.com/gin-gonic/gin/recovery.go b/index/server/vendor/github.com/gin-gonic/gin/recovery.go index 563f5aaa..abb64510 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/recovery.go +++ b/index/server/vendor/github.com/gin-gonic/gin/recovery.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -6,6 +6,7 @@ package gin import ( "bytes" + "errors" "fmt" "io" "io/ioutil" @@ -27,14 +28,14 @@ var ( ) // RecoveryFunc defines the function passable to CustomRecovery. -type RecoveryFunc func(c *Context, err interface{}) +type RecoveryFunc func(c *Context, err any) // Recovery returns a middleware that recovers from any panics and writes a 500 if there was one. func Recovery() HandlerFunc { return RecoveryWithWriter(DefaultErrorWriter) } -//CustomRecovery returns a middleware that recovers from any panics and calls the provided handle func to handle it. +// CustomRecovery returns a middleware that recovers from any panics and calls the provided handle func to handle it. func CustomRecovery(handle RecoveryFunc) HandlerFunc { return RecoveryWithWriter(DefaultErrorWriter, handle) } @@ -60,7 +61,8 @@ func CustomRecoveryWithWriter(out io.Writer, handle RecoveryFunc) HandlerFunc { // condition that warrants a panic stack trace. 
var brokenPipe bool if ne, ok := err.(*net.OpError); ok { - if se, ok := ne.Err.(*os.SyscallError); ok { + var se *os.SyscallError + if errors.As(ne, &se) { if strings.Contains(strings.ToLower(se.Error()), "broken pipe") || strings.Contains(strings.ToLower(se.Error()), "connection reset by peer") { brokenPipe = true } @@ -100,7 +102,7 @@ func CustomRecoveryWithWriter(out io.Writer, handle RecoveryFunc) HandlerFunc { } } -func defaultHandleRecovery(c *Context, err interface{}) { +func defaultHandleRecovery(c *Context, err any) { c.AbortWithStatus(http.StatusInternalServerError) } @@ -153,7 +155,7 @@ func function(pc uintptr) []byte { // runtime/debug.*T·ptrmethod // and want // *T.ptrmethod - // Also the package path might contains dot (e.g. code.google.com/...), + // Also the package path might contain dot (e.g. code.google.com/...), // so first eliminate the path prefix if lastSlash := bytes.LastIndex(name, slash); lastSlash >= 0 { name = name[lastSlash+1:] @@ -165,7 +167,7 @@ func function(pc uintptr) []byte { return name } +// timeFormat returns a customized time string for logger. func timeFormat(t time.Time) string { - timeString := t.Format("2006/01/02 - 15:04:05") - return timeString + return t.Format("2006/01/02 - 15:04:05") } diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/any.go b/index/server/vendor/github.com/gin-gonic/gin/render/any.go new file mode 100644 index 00000000..b19ad45d --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/render/any.go @@ -0,0 +1,10 @@ +// Copyright 2021 Gin Core Team. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. + +//go:build !go1.18 +// +build !go1.18 + +package render + +type any = interface{} diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/data.go b/index/server/vendor/github.com/gin-gonic/gin/render/data.go index 6ba657ba..a653ea30 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/data.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/data.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/html.go b/index/server/vendor/github.com/gin-gonic/gin/render/html.go index 6696ece9..c308408d 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/html.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/html.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -20,7 +20,7 @@ type Delims struct { // HTMLRender interface is to be implemented by HTMLProduction and HTMLDebug. type HTMLRender interface { // Instance returns an HTML instance. - Instance(string, interface{}) Render + Instance(string, any) Render } // HTMLProduction contains template reference and its delims. @@ -41,13 +41,13 @@ type HTMLDebug struct { type HTML struct { Template *template.Template Name string - Data interface{} + Data any } var htmlContentType = []string{"text/html; charset=utf-8"} // Instance (HTMLProduction) returns an HTML instance which it realizes Render interface. 
-func (r HTMLProduction) Instance(name string, data interface{}) Render { +func (r HTMLProduction) Instance(name string, data any) Render { return HTML{ Template: r.Template, Name: name, @@ -56,7 +56,7 @@ func (r HTMLProduction) Instance(name string, data interface{}) Render { } // Instance (HTMLDebug) returns an HTML instance which it realizes Render interface. -func (r HTMLDebug) Instance(name string, data interface{}) Render { +func (r HTMLDebug) Instance(name string, data any) Render { return HTML{ Template: r.loadTemplate(), Name: name, diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/json.go b/index/server/vendor/github.com/gin-gonic/gin/render/json.go index 41863093..af678e80 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/json.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/json.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -16,39 +16,41 @@ import ( // JSON contains the given interface object. type JSON struct { - Data interface{} + Data any } // IndentedJSON contains the given interface object. type IndentedJSON struct { - Data interface{} + Data any } // SecureJSON contains the given interface object and its prefix. type SecureJSON struct { Prefix string - Data interface{} + Data any } // JsonpJSON contains the given interface object its callback. type JsonpJSON struct { Callback string - Data interface{} + Data any } // AsciiJSON contains the given interface object. type AsciiJSON struct { - Data interface{} + Data any } // PureJSON contains the given interface object. type PureJSON struct { - Data interface{} + Data any } -var jsonContentType = []string{"application/json; charset=utf-8"} -var jsonpContentType = []string{"application/javascript; charset=utf-8"} -var jsonAsciiContentType = []string{"application/json"} +var ( + jsonContentType = []string{"application/json; charset=utf-8"} + jsonpContentType = []string{"application/javascript; charset=utf-8"} + jsonASCIIContentType = []string{"application/json"} +) // Render (JSON) writes data with custom ContentType. func (r JSON) Render(w http.ResponseWriter) (err error) { @@ -64,7 +66,7 @@ func (r JSON) WriteContentType(w http.ResponseWriter) { } // WriteJSON marshals the given interface object and writes it with custom ContentType. 
-func WriteJSON(w http.ResponseWriter, obj interface{}) error { +func WriteJSON(w http.ResponseWriter, obj any) error { writeContentType(w, jsonContentType) jsonBytes, err := json.Marshal(obj) if err != nil { @@ -100,8 +102,7 @@ func (r SecureJSON) Render(w http.ResponseWriter) error { // if the jsonBytes is array values if bytes.HasPrefix(jsonBytes, bytesconv.StringToBytes("[")) && bytes.HasSuffix(jsonBytes, bytesconv.StringToBytes("]")) { - _, err = w.Write(bytesconv.StringToBytes(r.Prefix)) - if err != nil { + if _, err = w.Write(bytesconv.StringToBytes(r.Prefix)); err != nil { return err } } @@ -128,20 +129,19 @@ func (r JsonpJSON) Render(w http.ResponseWriter) (err error) { } callback := template.JSEscapeString(r.Callback) - _, err = w.Write(bytesconv.StringToBytes(callback)) - if err != nil { + if _, err = w.Write(bytesconv.StringToBytes(callback)); err != nil { return err } - _, err = w.Write(bytesconv.StringToBytes("(")) - if err != nil { + + if _, err = w.Write(bytesconv.StringToBytes("(")); err != nil { return err } - _, err = w.Write(ret) - if err != nil { + + if _, err = w.Write(ret); err != nil { return err } - _, err = w.Write(bytesconv.StringToBytes(");")) - if err != nil { + + if _, err = w.Write(bytesconv.StringToBytes(");")); err != nil { return err } @@ -176,7 +176,7 @@ func (r AsciiJSON) Render(w http.ResponseWriter) (err error) { // WriteContentType (AsciiJSON) writes JSON ContentType. func (r AsciiJSON) WriteContentType(w http.ResponseWriter) { - writeContentType(w, jsonAsciiContentType) + writeContentType(w, jsonASCIIContentType) } // Render (PureJSON) writes custom ContentType and encodes the given interface object. diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/msgpack.go b/index/server/vendor/github.com/gin-gonic/gin/render/msgpack.go index 6ef5b6e5..e0f30f7a 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/msgpack.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/msgpack.go @@ -1,4 +1,4 @@ -// Copyright 2017 Manu Martinez-Almeida. All rights reserved. +// Copyright 2017 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -13,13 +13,15 @@ import ( "github.com/ugorji/go/codec" ) +// Check interface implemented here to support go build tag nomsgpack. +// See: https://github.com/gin-gonic/gin/pull/1852/ var ( _ Render = MsgPack{} ) // MsgPack contains the given interface object. type MsgPack struct { - Data interface{} + Data any } var msgpackContentType = []string{"application/msgpack; charset=utf-8"} @@ -35,7 +37,7 @@ func (r MsgPack) Render(w http.ResponseWriter) error { } // WriteMsgPack writes MsgPack ContentType and encodes the given interface object. -func WriteMsgPack(w http.ResponseWriter, obj interface{}) error { +func WriteMsgPack(w http.ResponseWriter, obj any) error { writeContentType(w, msgpackContentType) var mh codec.MsgpackHandle return codec.NewEncoder(w, &mh).Encode(obj) diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/protobuf.go b/index/server/vendor/github.com/gin-gonic/gin/render/protobuf.go index 15aca995..9331c405 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/protobuf.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/protobuf.go @@ -1,4 +1,4 @@ -// Copyright 2018 Gin Core Team. All rights reserved. +// Copyright 2018 Gin Core Team. All rights reserved. 
// Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -7,12 +7,12 @@ package render import ( "net/http" - "github.com/golang/protobuf/proto" + "google.golang.org/protobuf/proto" ) // ProtoBuf contains the given interface object. type ProtoBuf struct { - Data interface{} + Data any } var protobufContentType = []string{"application/x-protobuf"} diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/reader.go b/index/server/vendor/github.com/gin-gonic/gin/render/reader.go index d5282e49..5752d8d8 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/reader.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/reader.go @@ -1,4 +1,4 @@ -// Copyright 2018 Gin Core Team. All rights reserved. +// Copyright 2018 Gin Core Team. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/redirect.go b/index/server/vendor/github.com/gin-gonic/gin/render/redirect.go index c006691c..70e3a47e 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/redirect.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/redirect.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/render.go b/index/server/vendor/github.com/gin-gonic/gin/render/render.go index bcd568bf..7955000c 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/render.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/render.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -30,6 +30,7 @@ var ( _ Render = Reader{} _ Render = AsciiJSON{} _ Render = ProtoBuf{} + _ Render = TOML{} ) func writeContentType(w http.ResponseWriter, value []string) { diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/text.go b/index/server/vendor/github.com/gin-gonic/gin/render/text.go index 461b720a..77eafdfd 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/text.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/text.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -14,7 +14,7 @@ import ( // String contains the given interface object slice and its format. type String struct { Format string - Data []interface{} + Data []any } var plainContentType = []string{"text/plain; charset=utf-8"} @@ -30,7 +30,7 @@ func (r String) WriteContentType(w http.ResponseWriter) { } // WriteString writes data according to its format and write custom ContentType. 
-func WriteString(w http.ResponseWriter, format string, data []interface{}) (err error) { +func WriteString(w http.ResponseWriter, format string, data []any) (err error) { writeContentType(w, plainContentType) if len(data) > 0 { _, err = fmt.Fprintf(w, format, data...) diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/toml.go b/index/server/vendor/github.com/gin-gonic/gin/render/toml.go new file mode 100644 index 00000000..40f044c8 --- /dev/null +++ b/index/server/vendor/github.com/gin-gonic/gin/render/toml.go @@ -0,0 +1,36 @@ +// Copyright 2022 Gin Core Team. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. + +package render + +import ( + "net/http" + + "github.com/pelletier/go-toml/v2" +) + +// TOML contains the given interface object. +type TOML struct { + Data any +} + +var TOMLContentType = []string{"application/toml; charset=utf-8"} + +// Render (TOML) marshals the given interface object and writes data with custom ContentType. +func (r TOML) Render(w http.ResponseWriter) error { + r.WriteContentType(w) + + bytes, err := toml.Marshal(r.Data) + if err != nil { + return err + } + + _, err = w.Write(bytes) + return err +} + +// WriteContentType (TOML) writes TOML ContentType for response. +func (r TOML) WriteContentType(w http.ResponseWriter) { + writeContentType(w, TOMLContentType) +} diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/xml.go b/index/server/vendor/github.com/gin-gonic/gin/render/xml.go index cc5390a2..6af89017 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/xml.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/xml.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -11,7 +11,7 @@ import ( // XML contains the given interface object. type XML struct { - Data interface{} + Data any } var xmlContentType = []string{"application/xml; charset=utf-8"} diff --git a/index/server/vendor/github.com/gin-gonic/gin/render/yaml.go b/index/server/vendor/github.com/gin-gonic/gin/render/yaml.go index 0df78360..4f0ac01f 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/render/yaml.go +++ b/index/server/vendor/github.com/gin-gonic/gin/render/yaml.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -12,7 +12,7 @@ import ( // YAML contains the given interface object. type YAML struct { - Data interface{} + Data any } var yamlContentType = []string{"application/x-yaml; charset=utf-8"} diff --git a/index/server/vendor/github.com/gin-gonic/gin/response_writer.go b/index/server/vendor/github.com/gin-gonic/gin/response_writer.go index 26826689..77c7ed8f 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/response_writer.go +++ b/index/server/vendor/github.com/gin-gonic/gin/response_writer.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. 
@@ -23,23 +23,23 @@ type ResponseWriter interface { http.Flusher http.CloseNotifier - // Returns the HTTP response status code of the current request. + // Status returns the HTTP response status code of the current request. Status() int - // Returns the number of bytes already written into the response http body. + // Size returns the number of bytes already written into the response http body. // See Written() Size() int - // Writes the string into the response body. + // WriteString writes the string into the response body. WriteString(string) (int, error) - // Returns true if the response body was already written. + // Written returns true if the response body was already written. Written() bool - // Forces to write the http header (status code + headers). + // WriteHeaderNow forces to write the http header (status code + headers). WriteHeaderNow() - // get the http.Pusher for server push + // Pusher get the http.Pusher for server push Pusher() http.Pusher } @@ -107,12 +107,12 @@ func (w *responseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) { return w.ResponseWriter.(http.Hijacker).Hijack() } -// CloseNotify implements the http.CloseNotify interface. +// CloseNotify implements the http.CloseNotifier interface. func (w *responseWriter) CloseNotify() <-chan bool { return w.ResponseWriter.(http.CloseNotifier).CloseNotify() } -// Flush implements the http.Flush interface. +// Flush implements the http.Flusher interface. func (w *responseWriter) Flush() { w.WriteHeaderNow() w.ResponseWriter.(http.Flusher).Flush() diff --git a/index/server/vendor/github.com/gin-gonic/gin/routergroup.go b/index/server/vendor/github.com/gin-gonic/gin/routergroup.go index 15d9930d..3c082d93 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/routergroup.go +++ b/index/server/vendor/github.com/gin-gonic/gin/routergroup.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. @@ -11,6 +11,18 @@ import ( "strings" ) +var ( + // regEnLetter matches english letters for http method name + regEnLetter = regexp.MustCompile("^[A-Z]+$") + + // anyMethods for RouterGroup Any method + anyMethods = []string{ + http.MethodGet, http.MethodPost, http.MethodPut, http.MethodPatch, + http.MethodHead, http.MethodOptions, http.MethodDelete, http.MethodConnect, + http.MethodTrace, + } +) + // IRouter defines all router handle interface includes single and group router. type IRouter interface { IRoutes @@ -32,6 +44,7 @@ type IRoutes interface { HEAD(string, ...HandlerFunc) IRoutes StaticFile(string, string) IRoutes + StaticFileFS(string, string, http.FileSystem) IRoutes Static(string, string) IRoutes StaticFS(string, http.FileSystem) IRoutes } @@ -87,7 +100,7 @@ func (group *RouterGroup) handle(httpMethod, relativePath string, handlers Handl // frequently used, non-standardized or custom methods (e.g. for internal // communication with a proxy). 
func (group *RouterGroup) Handle(httpMethod, relativePath string, handlers ...HandlerFunc) IRoutes { - if matches, err := regexp.MatchString("^[A-Z]+$", httpMethod); !matches || err != nil { + if matched := regEnLetter.MatchString(httpMethod); !matched { panic("http method " + httpMethod + " is not valid") } return group.handle(httpMethod, relativePath, handlers) @@ -131,27 +144,34 @@ func (group *RouterGroup) HEAD(relativePath string, handlers ...HandlerFunc) IRo // Any registers a route that matches all the HTTP methods. // GET, POST, PUT, PATCH, HEAD, OPTIONS, DELETE, CONNECT, TRACE. func (group *RouterGroup) Any(relativePath string, handlers ...HandlerFunc) IRoutes { - group.handle(http.MethodGet, relativePath, handlers) - group.handle(http.MethodPost, relativePath, handlers) - group.handle(http.MethodPut, relativePath, handlers) - group.handle(http.MethodPatch, relativePath, handlers) - group.handle(http.MethodHead, relativePath, handlers) - group.handle(http.MethodOptions, relativePath, handlers) - group.handle(http.MethodDelete, relativePath, handlers) - group.handle(http.MethodConnect, relativePath, handlers) - group.handle(http.MethodTrace, relativePath, handlers) + for _, method := range anyMethods { + group.handle(method, relativePath, handlers) + } + return group.returnObj() } // StaticFile registers a single route in order to serve a single file of the local filesystem. // router.StaticFile("favicon.ico", "./resources/favicon.ico") func (group *RouterGroup) StaticFile(relativePath, filepath string) IRoutes { + return group.staticFileHandler(relativePath, func(c *Context) { + c.File(filepath) + }) +} + +// StaticFileFS works just like `StaticFile` but a custom `http.FileSystem` can be used instead.. +// router.StaticFileFS("favicon.ico", "./resources/favicon.ico", Dir{".", false}) +// Gin by default user: gin.Dir() +func (group *RouterGroup) StaticFileFS(relativePath, filepath string, fs http.FileSystem) IRoutes { + return group.staticFileHandler(relativePath, func(c *Context) { + c.FileFromFS(filepath, fs) + }) +} + +func (group *RouterGroup) staticFileHandler(relativePath string, handler HandlerFunc) IRoutes { if strings.Contains(relativePath, ":") || strings.Contains(relativePath, "*") { panic("URL parameters can not be used when serving a static file") } - handler := func(c *Context) { - c.File(filepath) - } group.GET(relativePath, handler) group.HEAD(relativePath, handler) return group.returnObj() @@ -209,9 +229,7 @@ func (group *RouterGroup) createStaticHandler(relativePath string, fs http.FileS func (group *RouterGroup) combineHandlers(handlers HandlersChain) HandlersChain { finalSize := len(group.Handlers) + len(handlers) - if finalSize >= int(abortIndex) { - panic("too many handlers") - } + assert1(finalSize < int(abortIndex), "too many handlers") mergedHandlers := make(HandlersChain, finalSize) copy(mergedHandlers, group.Handlers) copy(mergedHandlers[len(group.Handlers):], handlers) diff --git a/index/server/vendor/github.com/gin-gonic/gin/test_helpers.go b/index/server/vendor/github.com/gin-gonic/gin/test_helpers.go index 3a7a5ddf..b3be93b4 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/test_helpers.go +++ b/index/server/vendor/github.com/gin-gonic/gin/test_helpers.go @@ -1,4 +1,4 @@ -// Copyright 2017 Manu Martinez-Almeida. All rights reserved. +// Copyright 2017 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. 
diff --git a/index/server/vendor/github.com/gin-gonic/gin/tree.go b/index/server/vendor/github.com/gin-gonic/gin/tree.go index 158a3390..88100eec 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/tree.go +++ b/index/server/vendor/github.com/gin-gonic/gin/tree.go @@ -31,8 +31,8 @@ type Param struct { // It is therefore safe to read values by the index. type Params []Param -// Get returns the value of the first Param which key matches the given name. -// If no matching Param is found, an empty string is returned. +// Get returns the value of the first Param which key matches the given name and a boolean true. +// If no matching Param is found, an empty string is returned and a boolean false . func (ps Params) Get(name string) (string, bool) { for _, entry := range ps { if entry.Key == name { @@ -81,7 +81,7 @@ func longestCommonPrefix(a, b string) int { return i } -// addChild will add a child node, keeping wildcards at the end +// addChild will add a child node, keeping wildcardChild at the end func (n *node) addChild(child *node) { if n.wildChild && len(n.children) > 0 { wildcardChild := n.children[len(n.children)-1] @@ -107,8 +107,7 @@ func countSections(path string) uint16 { type nodeType uint8 const ( - static nodeType = iota // default - root + root nodeType = iota + 1 param catchAll ) @@ -297,7 +296,7 @@ func (n *node) insertChild(path string, fullPath string, handlers HandlersChain) break } - // The wildcard name must not contain ':' and '*' + // The wildcard name must only contain one ':' or '*' character if !valid { panic("only one wildcard per path segment is allowed, has: '" + wildcard + "' in path '" + fullPath + "'") @@ -326,7 +325,7 @@ func (n *node) insertChild(path string, fullPath string, handlers HandlersChain) n.priority++ // if the path doesn't end with the wildcard, then there - // will be another non-wildcard subpath starting with '/' + // will be another subpath starting with '/' if len(wildcard) < len(path) { path = path[len(wildcard):] @@ -350,7 +349,12 @@ func (n *node) insertChild(path string, fullPath string, handlers HandlersChain) } if len(n.path) > 0 && n.path[len(n.path)-1] == '/' { - panic("catch-all conflicts with existing handle for the path segment root in path '" + fullPath + "'") + pathSeg := strings.SplitN(n.children[0].path, "/", 2)[0] + panic("catch-all wildcard '" + path + + "' in new path '" + fullPath + + "' conflicts with existing path segment '" + pathSeg + + "' in existing prefix '" + n.path + pathSeg + + "'") } // currently fixed width 1 for '/' @@ -531,7 +535,7 @@ walk: // Outer loop for walking the tree // No handle found. Check if a handle for this path + a // trailing slash exists for TSR recommendation n = n.children[0] - value.tsr = n.path == "/" && n.handlers != nil + value.tsr = (n.path == "/" && n.handlers != nil) || (n.path == "" && n.indices == "/") } return diff --git a/index/server/vendor/github.com/gin-gonic/gin/utils.go b/index/server/vendor/github.com/gin-gonic/gin/utils.go index c32f0eeb..4021a2ab 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/utils.go +++ b/index/server/vendor/github.com/gin-gonic/gin/utils.go @@ -1,4 +1,4 @@ -// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. 
@@ -12,13 +12,14 @@ import ( "reflect" "runtime" "strings" + "unicode" ) // BindKey indicates a default bind key. const BindKey = "_gin-gonic/gin/bindkey" // Bind is a helper function for given interface object and returns a Gin middleware. -func Bind(val interface{}) HandlerFunc { +func Bind(val any) HandlerFunc { value := reflect.ValueOf(val) if value.Kind() == reflect.Ptr { panic(`Bind struct can not be a pointer. Example: @@ -50,7 +51,7 @@ func WrapH(h http.Handler) HandlerFunc { } // H is a shortcut for map[string]interface{} -type H map[string]interface{} +type H map[string]any // MarshalXML allows type H to be used with xml.Marshal. func (h H) MarshalXML(e *xml.Encoder, start xml.StartElement) error { @@ -89,7 +90,7 @@ func filterFlags(content string) string { return content } -func chooseData(custom, wildcard interface{}) interface{} { +func chooseData(custom, wildcard any) any { if custom != nil { return custom } @@ -120,7 +121,7 @@ func lastChar(str string) uint8 { return str[len(str)-1] } -func nameOfFunction(f interface{}) string { +func nameOfFunction(f any) string { return runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name() } @@ -151,3 +152,13 @@ func resolveAddress(addr []string) string { panic("too many parameters") } } + +// https://stackoverflow.com/questions/53069040/checking-a-string-contains-only-ascii-characters +func isASCII(s string) bool { + for i := 0; i < len(s); i++ { + if s[i] > unicode.MaxASCII { + return false + } + } + return true +} diff --git a/index/server/vendor/github.com/gin-gonic/gin/version.go b/index/server/vendor/github.com/gin-gonic/gin/version.go index 4b69b9b9..632ca7d1 100644 --- a/index/server/vendor/github.com/gin-gonic/gin/version.go +++ b/index/server/vendor/github.com/gin-gonic/gin/version.go @@ -1,8 +1,8 @@ -// Copyright 2018 Gin Core Team. All rights reserved. +// Copyright 2018 Gin Core Team. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. package gin // Version is the current gin framework's version. -const Version = "v1.7.7" +const Version = "v1.8.1" diff --git a/index/server/vendor/github.com/go-openapi/swag/.gitattributes b/index/server/vendor/github.com/go-openapi/swag/.gitattributes new file mode 100644 index 00000000..49ad5276 --- /dev/null +++ b/index/server/vendor/github.com/go-openapi/swag/.gitattributes @@ -0,0 +1,2 @@ +# gofmt always uses LF, whereas Git uses CRLF on Windows. 
+*.go text eol=lf diff --git a/index/server/vendor/github.com/go-openapi/swag/.golangci.yml b/index/server/vendor/github.com/go-openapi/swag/.golangci.yml index 813c47aa..2a4a71f3 100644 --- a/index/server/vendor/github.com/go-openapi/swag/.golangci.yml +++ b/index/server/vendor/github.com/go-openapi/swag/.golangci.yml @@ -37,3 +37,14 @@ linters: - gci - gocognit - paralleltest + - thelper + - ifshort + - gomoddirectives + - cyclop + - forcetypeassert + - ireturn + - tagliatelle + - varnamelen + - goimports + - tenv + - golint diff --git a/index/server/vendor/github.com/go-openapi/swag/.travis.yml b/index/server/vendor/github.com/go-openapi/swag/.travis.yml deleted file mode 100644 index fc25a887..00000000 --- a/index/server/vendor/github.com/go-openapi/swag/.travis.yml +++ /dev/null @@ -1,37 +0,0 @@ -after_success: -- bash <(curl -s https://codecov.io/bash) -go: -- 1.14.x -- 1.x -arch: -- amd64 -jobs: - include: - # include arch ppc, but only for latest go version - skip testing for race - - go: 1.x - arch: ppc64le - install: ~ - script: - - go test -v - - #- go: 1.x - # arch: arm - # install: ~ - # script: - # - go test -v - - # include linting job, but only for latest go version and amd64 arch - - go: 1.x - arch: amd64 - install: - go get github.com/golangci/golangci-lint/cmd/golangci-lint - script: - - golangci-lint run --new-from-rev master -install: -- GO111MODULE=off go get -u gotest.tools/gotestsum -language: go -notifications: - slack: - secure: QUWvCkBBK09GF7YtEvHHVt70JOkdlNBG0nIKu/5qc4/nW5HP8I2w0SEf/XR2je0eED1Qe3L/AfMCWwrEj+IUZc3l4v+ju8X8R3Lomhme0Eb0jd1MTMCuPcBT47YCj0M7RON7vXtbFfm1hFJ/jLe5+9FXz0hpXsR24PJc5ZIi/ogNwkaPqG4BmndzecpSh0vc2FJPZUD9LT0I09REY/vXR0oQAalLkW0asGD5taHZTUZq/kBpsNxaAFrLM23i4mUcf33M5fjLpvx5LRICrX/57XpBrDh2TooBU6Qj3CgoY0uPRYUmSNxbVx1czNzl2JtEpb5yjoxfVPQeg0BvQM00G8LJINISR+ohrjhkZmAqchDupAX+yFrxTtORa78CtnIL6z/aTNlgwwVD8kvL/1pFA/JWYmKDmz93mV/+6wubGzNSQCstzjkFA4/iZEKewKUoRIAi/fxyscP6L/rCpmY/4llZZvrnyTqVbt6URWpopUpH4rwYqreXAtJxJsfBJIeSmUIiDIOMGkCTvyTEW3fWGmGoqWtSHLoaWDyAIGb7azb+KvfpWtEcoPFWfSWU+LGee0A/YsUhBl7ADB9A0CJEuR8q4BPpKpfLwPKSiKSAXL7zDkyjExyhtgqbSl2jS+rKIHOZNL8JkCcTP2MKMVd563C5rC5FMKqu3S9m2b6380E= -script: -- gotestsum -f short-verbose -- -race -coverprofile=coverage.txt -covermode=atomic ./... diff --git a/index/server/vendor/github.com/go-openapi/swag/file.go b/index/server/vendor/github.com/go-openapi/swag/file.go new file mode 100644 index 00000000..16accc55 --- /dev/null +++ b/index/server/vendor/github.com/go-openapi/swag/file.go @@ -0,0 +1,33 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import "mime/multipart" + +// File represents an uploaded file. 
+type File struct { + Data multipart.File + Header *multipart.FileHeader +} + +// Read bytes from the file +func (f *File) Read(p []byte) (n int, err error) { + return f.Data.Read(p) +} + +// Close the file +func (f *File) Close() error { + return f.Data.Close() +} diff --git a/index/server/vendor/github.com/go-openapi/swag/post_go18.go b/index/server/vendor/github.com/go-openapi/swag/post_go18.go index c2e686d3..f5228b82 100644 --- a/index/server/vendor/github.com/go-openapi/swag/post_go18.go +++ b/index/server/vendor/github.com/go-openapi/swag/post_go18.go @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +//go:build go1.8 // +build go1.8 package swag diff --git a/index/server/vendor/github.com/go-openapi/swag/post_go19.go b/index/server/vendor/github.com/go-openapi/swag/post_go19.go index eb2f2d8b..7c7da9c0 100644 --- a/index/server/vendor/github.com/go-openapi/swag/post_go19.go +++ b/index/server/vendor/github.com/go-openapi/swag/post_go19.go @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +//go:build go1.9 // +build go1.9 package swag diff --git a/index/server/vendor/github.com/go-openapi/swag/pre_go18.go b/index/server/vendor/github.com/go-openapi/swag/pre_go18.go index 6607f339..2757d9b9 100644 --- a/index/server/vendor/github.com/go-openapi/swag/pre_go18.go +++ b/index/server/vendor/github.com/go-openapi/swag/pre_go18.go @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +//go:build !go1.8 // +build !go1.8 package swag diff --git a/index/server/vendor/github.com/go-openapi/swag/pre_go19.go b/index/server/vendor/github.com/go-openapi/swag/pre_go19.go index 4bae187d..0565db37 100644 --- a/index/server/vendor/github.com/go-openapi/swag/pre_go19.go +++ b/index/server/vendor/github.com/go-openapi/swag/pre_go19.go @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+//go:build !go1.9 // +build !go1.9 package swag diff --git a/index/server/vendor/github.com/go-playground/locales/README.md b/index/server/vendor/github.com/go-playground/locales/README.md index ba1b0680..5b0694fd 100644 --- a/index/server/vendor/github.com/go-playground/locales/README.md +++ b/index/server/vendor/github.com/go-playground/locales/README.md @@ -1,5 +1,5 @@ ## locales -![Project status](https://img.shields.io/badge/version-0.13.0-green.svg) +![Project status](https://img.shields.io/badge/version-0.14.0-green.svg) [![Build Status](https://travis-ci.org/go-playground/locales.svg?branch=master)](https://travis-ci.org/go-playground/locales) [![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/locales)](https://goreportcard.com/report/github.com/go-playground/locales) [![GoDoc](https://godoc.org/github.com/go-playground/locales?status.svg)](https://godoc.org/github.com/go-playground/locales) @@ -11,7 +11,7 @@ an i18n package; these were built for use with, but not exclusive to, [Universal Features -------- -- [x] Rules generated from the latest [CLDR](http://cldr.unicode.org/index/downloads) data, v31.0.1 +- [x] Rules generated from the latest [CLDR](http://cldr.unicode.org/index/downloads) data, v36.0.1 - [x] Contains Cardinal, Ordinal and Range Plural Rules - [x] Contains Month, Weekday and Timezone translations built in - [x] Contains Date & Time formatting functions diff --git a/index/server/vendor/github.com/go-playground/locales/currency/currency.go b/index/server/vendor/github.com/go-playground/locales/currency/currency.go index cdaba596..b5a95fb0 100644 --- a/index/server/vendor/github.com/go-playground/locales/currency/currency.go +++ b/index/server/vendor/github.com/go-playground/locales/currency/currency.go @@ -176,6 +176,7 @@ const ( MNT MOP MRO + MRU MTL MTP MUR @@ -262,9 +263,11 @@ const ( UYI UYP UYU + UYW UZS VEB VEF + VES VND VNN VUV diff --git a/index/server/vendor/github.com/go-playground/universal-translator/Makefile b/index/server/vendor/github.com/go-playground/universal-translator/Makefile new file mode 100644 index 00000000..ec3455bd --- /dev/null +++ b/index/server/vendor/github.com/go-playground/universal-translator/Makefile @@ -0,0 +1,18 @@ +GOCMD=GO111MODULE=on go + +linters-install: + @golangci-lint --version >/dev/null 2>&1 || { \ + echo "installing linting tools..."; \ + curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s v1.41.1; \ + } + +lint: linters-install + golangci-lint run + +test: + $(GOCMD) test -cover -race ./... + +bench: + $(GOCMD) test -bench=. -benchmem ./... 
+ +.PHONY: test lint linters-install \ No newline at end of file diff --git a/index/server/vendor/github.com/go-playground/universal-translator/README.md b/index/server/vendor/github.com/go-playground/universal-translator/README.md index 071f33ab..46dec6d2 100644 --- a/index/server/vendor/github.com/go-playground/universal-translator/README.md +++ b/index/server/vendor/github.com/go-playground/universal-translator/README.md @@ -1,5 +1,5 @@ ## universal-translator -![Project status](https://img.shields.io/badge/version-0.17.0-green.svg) +![Project status](https://img.shields.io/badge/version-0.18.0-green.svg) [![Build Status](https://travis-ci.org/go-playground/universal-translator.svg?branch=master)](https://travis-ci.org/go-playground/universal-translator) [![Coverage Status](https://coveralls.io/repos/github/go-playground/universal-translator/badge.svg)](https://coveralls.io/github/go-playground/universal-translator) [![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/universal-translator)](https://goreportcard.com/report/github.com/go-playground/universal-translator) @@ -18,7 +18,7 @@ use in your applications. Features -------- -- [x] Rules generated from the [CLDR](http://cldr.unicode.org/index/downloads) data, v30.0.3 +- [x] Rules generated from the [CLDR](http://cldr.unicode.org/index/downloads) data, v36.0.1 - [x] Contains Cardinal, Ordinal and Range Plural Rules - [x] Contains Month, Weekday and Timezone translations built in - [x] Contains Date & Time formatting functions @@ -51,7 +51,7 @@ Please see https://godoc.org/github.com/go-playground/universal-translator for u File formatting -------------- -All types, Plain substitution, Cardinal, Ordinal and Range translations can all be contained withing the same file(s); +All types, Plain substitution, Cardinal, Ordinal and Range translations can all be contained within the same file(s); they are only separated for easy viewing. 
##### Examples: diff --git a/index/server/vendor/github.com/go-playground/universal-translator/import_export.go b/index/server/vendor/github.com/go-playground/universal-translator/import_export.go index 7bd76f26..1216f192 100644 --- a/index/server/vendor/github.com/go-playground/universal-translator/import_export.go +++ b/index/server/vendor/github.com/go-playground/universal-translator/import_export.go @@ -257,6 +257,8 @@ func (t *UniversalTranslator) ImportByReader(format ImportExportFormat, reader i func stringToPR(s string) locales.PluralRule { switch s { + case "Zero": + return locales.PluralRuleZero case "One": return locales.PluralRuleOne case "Two": diff --git a/index/server/vendor/github.com/go-playground/universal-translator/translator.go b/index/server/vendor/github.com/go-playground/universal-translator/translator.go index cfafce8a..24b18db9 100644 --- a/index/server/vendor/github.com/go-playground/universal-translator/translator.go +++ b/index/server/vendor/github.com/go-playground/universal-translator/translator.go @@ -159,13 +159,13 @@ func (t *translator) AddCardinal(key interface{}, text string, rule locales.Plur } } else { - tarr = make([]*transText, 7, 7) + tarr = make([]*transText, 7) t.cardinalTanslations[key] = tarr } trans := &transText{ text: text, - indexes: make([]int, 2, 2), + indexes: make([]int, 2), } tarr[rule] = trans @@ -211,13 +211,13 @@ func (t *translator) AddOrdinal(key interface{}, text string, rule locales.Plura } } else { - tarr = make([]*transText, 7, 7) + tarr = make([]*transText, 7) t.ordinalTanslations[key] = tarr } trans := &transText{ text: text, - indexes: make([]int, 2, 2), + indexes: make([]int, 2), } tarr[rule] = trans @@ -261,13 +261,13 @@ func (t *translator) AddRange(key interface{}, text string, rule locales.PluralR } } else { - tarr = make([]*transText, 7, 7) + tarr = make([]*transText, 7) t.rangeTanslations[key] = tarr } trans := &transText{ text: text, - indexes: make([]int, 4, 4), + indexes: make([]int, 4), } tarr[rule] = trans diff --git a/index/server/vendor/github.com/go-playground/validator/v10/.travis.yml b/index/server/vendor/github.com/go-playground/validator/v10/.travis.yml deleted file mode 100644 index 85a7be34..00000000 --- a/index/server/vendor/github.com/go-playground/validator/v10/.travis.yml +++ /dev/null @@ -1,29 +0,0 @@ -language: go -go: - - 1.15.2 - - tip -matrix: - allow_failures: - - go: tip - -notifications: - email: - recipients: dean.karn@gmail.com - on_success: change - on_failure: always - -before_install: - - go install github.com/mattn/goveralls - - mkdir -p $GOPATH/src/gopkg.in - - ln -s $GOPATH/src/github.com/$TRAVIS_REPO_SLUG $GOPATH/src/gopkg.in/validator.v9 - -# Only clone the most recent commit. -git: - depth: 1 - -script: - - go test -v -race -covermode=atomic -coverprofile=coverage.coverprofile ./... 
- -after_success: | - [ $TRAVIS_GO_VERSION = 1.15.2 ] && - goveralls -coverprofile=coverage.coverprofile -service travis-ci -repotoken $COVERALLS_TOKEN diff --git a/index/server/vendor/github.com/go-playground/validator/v10/MAINTAINERS.md b/index/server/vendor/github.com/go-playground/validator/v10/MAINTAINERS.md new file mode 100644 index 00000000..b809c4ce --- /dev/null +++ b/index/server/vendor/github.com/go-playground/validator/v10/MAINTAINERS.md @@ -0,0 +1,16 @@ +## Maintainers Guide + +### Semantic Versioning +Semantic versioning as defined [here](https://semver.org) must be strictly adhered to. + +### External Dependencies +Any new external dependencies MUST: +- Have a compatible LICENSE present. +- Be actively maintained. +- Be approved by @go-playground/admins + +### PR Merge Requirements +- Up-to-date branch. +- Passing tests and linting. +- CODEOWNERS approval. +- Tests that cover both the Happy and Unhappy paths. \ No newline at end of file diff --git a/index/server/vendor/github.com/go-playground/validator/v10/Makefile b/index/server/vendor/github.com/go-playground/validator/v10/Makefile index 19c91ed7..ec3455bd 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/Makefile +++ b/index/server/vendor/github.com/go-playground/validator/v10/Makefile @@ -3,11 +3,11 @@ GOCMD=GO111MODULE=on go linters-install: @golangci-lint --version >/dev/null 2>&1 || { \ echo "installing linting tools..."; \ - curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s v1.21.0; \ + curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s v1.41.1; \ } lint: linters-install - $(PWD)/bin/golangci-lint run + golangci-lint run test: $(GOCMD) test -cover -race ./... 
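The go-playground/validator/v10 hunks that follow refresh the README's baked-in validation tables and register new tags (boolean, jwt, ulid, semver, credit_card, bic, and others) in bakedInValidators. As orientation only, and not part of the vendored diff, here is a minimal sketch of the tag-based API under its canonical import path github.com/go-playground/validator/v10; the User struct and its field values are hypothetical.

package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

// User is a hypothetical struct exercising a few baked-in tags,
// including semver, which this vendored version adds.
type User struct {
	Email   string `validate:"required,email"`
	Country string `validate:"iso3166_1_alpha2"` // e.g. "US"
	Version string `validate:"semver"`
}

func main() {
	validate := validator.New()

	err := validate.Struct(User{Email: "not-an-email", Country: "USA", Version: "1.2"})
	if err != nil {
		// A non-nil error is either *validator.InvalidValidationError (bad input)
		// or validator.ValidationErrors, as the README hunk below explains.
		if verrs, ok := err.(validator.ValidationErrors); ok {
			for _, fe := range verrs {
				fmt.Println(fe.Namespace(), fe.Tag())
			}
		}
	}
}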
diff --git a/index/server/vendor/github.com/go-playground/validator/v10/README.md b/index/server/vendor/github.com/go-playground/validator/v10/README.md index 04fbb3c8..9d0a79e9 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/README.md +++ b/index/server/vendor/github.com/go-playground/validator/v10/README.md @@ -1,7 +1,7 @@ Package validator -================ +================= [![Join the chat at https://gitter.im/go-playground/validator](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/go-playground/validator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -![Project status](https://img.shields.io/badge/version-10.4.1-green.svg) +![Project status](https://img.shields.io/badge/version-10.11.1-green.svg) [![Build Status](https://travis-ci.org/go-playground/validator.svg?branch=master)](https://travis-ci.org/go-playground/validator) [![Coverage Status](https://coveralls.io/repos/go-playground/validator/badge.svg?branch=master&service=github)](https://coveralls.io/github/go-playground/validator?branch=master) [![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/validator)](https://goreportcard.com/report/github.com/go-playground/validator) @@ -43,7 +43,7 @@ They return type error to avoid the issue discussed in the following, where err * http://stackoverflow.com/a/29138676/3158232 * https://github.com/go-playground/validator/issues/134 -Validator only InvalidValidationError for bad validation input, nil or ValidationErrors as type error; so, in your code all you need to do is check if the error returned is not nil, and if it's not check if error is InvalidValidationError ( if necessary, most of the time it isn't ) type cast it to type ValidationErrors like so: +Validator returns only InvalidValidationError for bad validation input, nil or ValidationErrors as type error; so, in your code all you need to do is check if the error returned is not nil, and if it's not check if error is InvalidValidationError ( if necessary, most of the time it isn't ) type cast it to type ValidationErrors like so: ```go err := validate.Struct(mystruct) @@ -53,7 +53,7 @@ validationErrors := err.(validator.ValidationErrors) Usage and documentation ------ -Please see https://godoc.org/github.com/go-playground/validator for detailed usage docs. +Please see https://pkg.go.dev/github.com/go-playground/validator/v10 for detailed usage docs. 
##### Examples: @@ -100,7 +100,7 @@ Baked-in Validations | hostname_rfc1123 | Hostname RFC 1123 | | ip | Internet Protocol Address IP | | ip4_addr | Internet Protocol Address IPv4 | -| ip6_addr |Internet Protocol Address IPv6 | +| ip6_addr | Internet Protocol Address IPv6 | | ip_addr | Internet Protocol Address IP | | ipv4 | Internet Protocol Address IPv4 | | ipv6 | Internet Protocol Address IPv6 | @@ -126,15 +126,21 @@ Baked-in Validations | alphanumunicode | Alphanumeric Unicode | | alphaunicode | Alpha Unicode | | ascii | ASCII | +| boolean | Boolean | | contains | Contains | | containsany | Contains Any | | containsrune | Contains Rune | +| endsnotwith | Ends Not With | | endswith | Ends With | +| excludes | Excludes | +| excludesall | Excludes All | +| excludesrune | Excludes Rune | | lowercase | Lowercase | | multibyte | Multi-Byte Characters | | number | NOT DOCUMENTED IN doc.go | | numeric | Numeric | | printascii | Printable ASCII | +| startsnotwith | Starts Not With | | startswith | Starts With | | uppercase | Uppercase | @@ -143,8 +149,11 @@ Baked-in Validations | - | - | | base64 | Base64 String | | base64url | Base64URL String | +| bic | Business Identifier Code (ISO 9362) | +| bcp47_language_tag | Language tag (BCP 47) | | btc_addr | Bitcoin Address | | btc_addr_bech32 | Bitcoin Bech32 Address (segwit) | +| credit_card | Credit Card Number | | datetime | Datetime | | e164 | e164 formatted phone number | | email | E-mail String @@ -158,12 +167,21 @@ Baked-in Validations | isbn | International Standard Book Number | | isbn10 | International Standard Book Number 10 | | isbn13 | International Standard Book Number 13 | +| iso3166_1_alpha2 | Two-letter country code (ISO 3166-1 alpha-2) | +| iso3166_1_alpha3 | Three-letter country code (ISO 3166-1 alpha-3) | +| iso3166_1_alpha_numeric | Numeric country code (ISO 3166-1 numeric) | +| iso3166_2 | Country subdivision code (ISO 3166-2) | +| iso4217 | Currency code (ISO 4217) | | json | JSON | +| jwt | JSON Web Token (JWT) | | latitude | Latitude | | longitude | Longitude | +| postcode_iso3166_alpha2 | Postcode | +| postcode_iso3166_alpha2_field | Postcode | | rgb | RGB String | | rgba | RGBA String | | ssn | Social Security Number SSN | +| timezone | Timezone | | uuid | Universally Unique Identifier UUID | | uuid3 | Universally Unique Identifier UUID v3 | | uuid3_rfc4122 | Universally Unique Identifier UUID v3 RFC4122 | @@ -172,13 +190,25 @@ Baked-in Validations | uuid5 | Universally Unique Identifier UUID v5 | | uuid5_rfc4122 | Universally Unique Identifier UUID v5 RFC4122 | | uuid_rfc4122 | Universally Unique Identifier UUID RFC4122 | +| md4 | MD4 hash | +| md5 | MD5 hash | +| sha256 | SHA256 hash | +| sha384 | SHA384 hash | +| sha512 | SHA512 hash | +| ripemd128 | RIPEMD-128 hash | +| ripemd128 | RIPEMD-160 hash | +| tiger128 | TIGER128 hash | +| tiger160 | TIGER160 hash | +| tiger192 | TIGER192 hash | +| semver | Semantic Versioning 2.0.0 | +| ulid | Universally Unique Lexicographically Sortable Identifier ULID | ### Comparisons: | Tag | Description | | - | - | | eq | Equals | | gt | Greater than| -| gte |Greater than or equal | +| gte | Greater than or equal | | lt | Less Than | | lte | Less Than or Equal | | ne | Not Equal | @@ -187,10 +217,6 @@ Baked-in Validations | Tag | Description | | - | - | | dir | Directory | -| endswith | Ends With | -| excludes | Excludes | -| excludesall | Excludes All | -| excludesrune | Excludes Rune | | file | File path | | isdefault | Is Default | | len | Length | @@ -204,12 +230,20 @@ Baked-in 
Validations | required_with_all | Required With All | | required_without | Required Without | | required_without_all | Required Without All | +| excluded_if | Excluded If | +| excluded_unless | Excluded Unless | | excluded_with | Excluded With | | excluded_with_all | Excluded With All | | excluded_without | Excluded Without | | excluded_without_all | Excluded Without All | | unique | Unique | +#### Aliases: +| Tag | Description | +| - | - | +| iscolor | hexcolor\|rgb\|rgba\|hsl\|hsla | +| country_code | iso3166_1_alpha2\|iso3166_1_alpha3\|iso3166_1_alpha_numeric | + Benchmarks ------ ###### Run on MacBook Pro (15-inch, 2017) go version go1.10.2 darwin/amd64 @@ -295,5 +329,10 @@ How to Contribute Make a pull request... License ------- +------- Distributed under MIT License, please see license file within the code for more details. + +Maintainers +----------- +This project has grown large enough that more than one person is required to properly support the community. +If you are interested in becoming a maintainer please reach out to me https://github.com/deankarn diff --git a/index/server/vendor/github.com/go-playground/validator/v10/baked_in.go b/index/server/vendor/github.com/go-playground/validator/v10/baked_in.go index 6ce762d1..c9b1db40 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/baked_in.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/baked_in.go @@ -18,6 +18,7 @@ import ( "unicode/utf8" "golang.org/x/crypto/sha3" + "golang.org/x/text/language" urn "github.com/leodido/go-urn" ) @@ -55,7 +56,7 @@ var ( isdefault: {}, } - // BakedInAliasValidators is a default mapping of a single validation tag that + // bakedInAliases is a default mapping of a single validation tag that // defines a common or complex set of validation(s) to simplify // adding validation to structs. bakedInAliases = map[string]string{ @@ -63,136 +64,163 @@ var ( "country_code": "iso3166_1_alpha2|iso3166_1_alpha3|iso3166_1_alpha_numeric", } - // BakedInValidators is the default map of ValidationFunc + // bakedInValidators is the default map of ValidationFunc // you can add, remove or even replace items to suite your needs, // or even disregard and use your own map if so desired. 
bakedInValidators = map[string]Func{ - "required": hasValue, - "required_if": requiredIf, - "required_unless": requiredUnless, - "required_with": requiredWith, - "required_with_all": requiredWithAll, - "required_without": requiredWithout, - "required_without_all": requiredWithoutAll, - "excluded_with": excludedWith, - "excluded_with_all": excludedWithAll, - "excluded_without": excludedWithout, - "excluded_without_all": excludedWithoutAll, - "isdefault": isDefault, - "len": hasLengthOf, - "min": hasMinOf, - "max": hasMaxOf, - "eq": isEq, - "ne": isNe, - "lt": isLt, - "lte": isLte, - "gt": isGt, - "gte": isGte, - "eqfield": isEqField, - "eqcsfield": isEqCrossStructField, - "necsfield": isNeCrossStructField, - "gtcsfield": isGtCrossStructField, - "gtecsfield": isGteCrossStructField, - "ltcsfield": isLtCrossStructField, - "ltecsfield": isLteCrossStructField, - "nefield": isNeField, - "gtefield": isGteField, - "gtfield": isGtField, - "ltefield": isLteField, - "ltfield": isLtField, - "fieldcontains": fieldContains, - "fieldexcludes": fieldExcludes, - "alpha": isAlpha, - "alphanum": isAlphanum, - "alphaunicode": isAlphaUnicode, - "alphanumunicode": isAlphanumUnicode, - "numeric": isNumeric, - "number": isNumber, - "hexadecimal": isHexadecimal, - "hexcolor": isHEXColor, - "rgb": isRGB, - "rgba": isRGBA, - "hsl": isHSL, - "hsla": isHSLA, - "e164": isE164, - "email": isEmail, - "url": isURL, - "uri": isURI, - "urn_rfc2141": isUrnRFC2141, // RFC 2141 - "file": isFile, - "base64": isBase64, - "base64url": isBase64URL, - "contains": contains, - "containsany": containsAny, - "containsrune": containsRune, - "excludes": excludes, - "excludesall": excludesAll, - "excludesrune": excludesRune, - "startswith": startsWith, - "endswith": endsWith, - "startsnotwith": startsNotWith, - "endsnotwith": endsNotWith, - "isbn": isISBN, - "isbn10": isISBN10, - "isbn13": isISBN13, - "eth_addr": isEthereumAddress, - "btc_addr": isBitcoinAddress, - "btc_addr_bech32": isBitcoinBech32Address, - "uuid": isUUID, - "uuid3": isUUID3, - "uuid4": isUUID4, - "uuid5": isUUID5, - "uuid_rfc4122": isUUIDRFC4122, - "uuid3_rfc4122": isUUID3RFC4122, - "uuid4_rfc4122": isUUID4RFC4122, - "uuid5_rfc4122": isUUID5RFC4122, - "ascii": isASCII, - "printascii": isPrintableASCII, - "multibyte": hasMultiByteCharacter, - "datauri": isDataURI, - "latitude": isLatitude, - "longitude": isLongitude, - "ssn": isSSN, - "ipv4": isIPv4, - "ipv6": isIPv6, - "ip": isIP, - "cidrv4": isCIDRv4, - "cidrv6": isCIDRv6, - "cidr": isCIDR, - "tcp4_addr": isTCP4AddrResolvable, - "tcp6_addr": isTCP6AddrResolvable, - "tcp_addr": isTCPAddrResolvable, - "udp4_addr": isUDP4AddrResolvable, - "udp6_addr": isUDP6AddrResolvable, - "udp_addr": isUDPAddrResolvable, - "ip4_addr": isIP4AddrResolvable, - "ip6_addr": isIP6AddrResolvable, - "ip_addr": isIPAddrResolvable, - "unix_addr": isUnixAddrResolvable, - "mac": isMAC, - "hostname": isHostnameRFC952, // RFC 952 - "hostname_rfc1123": isHostnameRFC1123, // RFC 1123 - "fqdn": isFQDN, - "unique": isUnique, - "oneof": isOneOf, - "html": isHTML, - "html_encoded": isHTMLEncoded, - "url_encoded": isURLEncoded, - "dir": isDir, - "json": isJSON, - "hostname_port": isHostnamePort, - "lowercase": isLowercase, - "uppercase": isUppercase, - "datetime": isDatetime, - "timezone": isTimeZone, - "iso3166_1_alpha2": isIso3166Alpha2, - "iso3166_1_alpha3": isIso3166Alpha3, - "iso3166_1_alpha_numeric": isIso3166AlphaNumeric, + "required": hasValue, + "required_if": requiredIf, + "required_unless": requiredUnless, + "required_with": requiredWith, + 
"required_with_all": requiredWithAll, + "required_without": requiredWithout, + "required_without_all": requiredWithoutAll, + "excluded_if": excludedIf, + "excluded_unless": excludedUnless, + "excluded_with": excludedWith, + "excluded_with_all": excludedWithAll, + "excluded_without": excludedWithout, + "excluded_without_all": excludedWithoutAll, + "isdefault": isDefault, + "len": hasLengthOf, + "min": hasMinOf, + "max": hasMaxOf, + "eq": isEq, + "ne": isNe, + "lt": isLt, + "lte": isLte, + "gt": isGt, + "gte": isGte, + "eqfield": isEqField, + "eqcsfield": isEqCrossStructField, + "necsfield": isNeCrossStructField, + "gtcsfield": isGtCrossStructField, + "gtecsfield": isGteCrossStructField, + "ltcsfield": isLtCrossStructField, + "ltecsfield": isLteCrossStructField, + "nefield": isNeField, + "gtefield": isGteField, + "gtfield": isGtField, + "ltefield": isLteField, + "ltfield": isLtField, + "fieldcontains": fieldContains, + "fieldexcludes": fieldExcludes, + "alpha": isAlpha, + "alphanum": isAlphanum, + "alphaunicode": isAlphaUnicode, + "alphanumunicode": isAlphanumUnicode, + "boolean": isBoolean, + "numeric": isNumeric, + "number": isNumber, + "hexadecimal": isHexadecimal, + "hexcolor": isHEXColor, + "rgb": isRGB, + "rgba": isRGBA, + "hsl": isHSL, + "hsla": isHSLA, + "e164": isE164, + "email": isEmail, + "url": isURL, + "uri": isURI, + "urn_rfc2141": isUrnRFC2141, // RFC 2141 + "file": isFile, + "base64": isBase64, + "base64url": isBase64URL, + "contains": contains, + "containsany": containsAny, + "containsrune": containsRune, + "excludes": excludes, + "excludesall": excludesAll, + "excludesrune": excludesRune, + "startswith": startsWith, + "endswith": endsWith, + "startsnotwith": startsNotWith, + "endsnotwith": endsNotWith, + "isbn": isISBN, + "isbn10": isISBN10, + "isbn13": isISBN13, + "eth_addr": isEthereumAddress, + "btc_addr": isBitcoinAddress, + "btc_addr_bech32": isBitcoinBech32Address, + "uuid": isUUID, + "uuid3": isUUID3, + "uuid4": isUUID4, + "uuid5": isUUID5, + "uuid_rfc4122": isUUIDRFC4122, + "uuid3_rfc4122": isUUID3RFC4122, + "uuid4_rfc4122": isUUID4RFC4122, + "uuid5_rfc4122": isUUID5RFC4122, + "ulid": isULID, + "md4": isMD4, + "md5": isMD5, + "sha256": isSHA256, + "sha384": isSHA384, + "sha512": isSHA512, + "ripemd128": isRIPEMD128, + "ripemd160": isRIPEMD160, + "tiger128": isTIGER128, + "tiger160": isTIGER160, + "tiger192": isTIGER192, + "ascii": isASCII, + "printascii": isPrintableASCII, + "multibyte": hasMultiByteCharacter, + "datauri": isDataURI, + "latitude": isLatitude, + "longitude": isLongitude, + "ssn": isSSN, + "ipv4": isIPv4, + "ipv6": isIPv6, + "ip": isIP, + "cidrv4": isCIDRv4, + "cidrv6": isCIDRv6, + "cidr": isCIDR, + "tcp4_addr": isTCP4AddrResolvable, + "tcp6_addr": isTCP6AddrResolvable, + "tcp_addr": isTCPAddrResolvable, + "udp4_addr": isUDP4AddrResolvable, + "udp6_addr": isUDP6AddrResolvable, + "udp_addr": isUDPAddrResolvable, + "ip4_addr": isIP4AddrResolvable, + "ip6_addr": isIP6AddrResolvable, + "ip_addr": isIPAddrResolvable, + "unix_addr": isUnixAddrResolvable, + "mac": isMAC, + "hostname": isHostnameRFC952, // RFC 952 + "hostname_rfc1123": isHostnameRFC1123, // RFC 1123 + "fqdn": isFQDN, + "unique": isUnique, + "oneof": isOneOf, + "html": isHTML, + "html_encoded": isHTMLEncoded, + "url_encoded": isURLEncoded, + "dir": isDir, + "json": isJSON, + "jwt": isJWT, + "hostname_port": isHostnamePort, + "lowercase": isLowercase, + "uppercase": isUppercase, + "datetime": isDatetime, + "timezone": isTimeZone, + "iso3166_1_alpha2": isIso3166Alpha2, + "iso3166_1_alpha3": 
isIso3166Alpha3, + "iso3166_1_alpha_numeric": isIso3166AlphaNumeric, + "iso3166_2": isIso31662, + "iso4217": isIso4217, + "iso4217_numeric": isIso4217Numeric, + "bcp47_language_tag": isBCP47LanguageTag, + "postcode_iso3166_alpha2": isPostcodeByIso3166Alpha2, + "postcode_iso3166_alpha2_field": isPostcodeByIso3166Alpha2Field, + "bic": isIsoBicFormat, + "semver": isSemverFormat, + "dns_rfc1035_label": isDnsRFC1035LabelFormat, + "credit_card": isCreditCard, } ) -var oneofValsCache = map[string][]string{} -var oneofValsCacheRWLock = sync.RWMutex{} +var ( + oneofValsCache = map[string][]string{} + oneofValsCacheRWLock = sync.RWMutex{} +) func parseOneOfParam2(s string) []string { oneofValsCacheRWLock.RLock() @@ -248,7 +276,6 @@ func isOneOf(fl FieldLevel) bool { // isUnique is the validation function for validating if each array|slice|map value is unique func isUnique(fl FieldLevel) bool { - field := fl.Field() param := fl.Param() v := reflect.ValueOf(struct{}{}) @@ -296,65 +323,57 @@ func isUnique(fl FieldLevel) bool { } } -// IsMAC is the validation function for validating if the field's value is a valid MAC address. +// isMAC is the validation function for validating if the field's value is a valid MAC address. func isMAC(fl FieldLevel) bool { - _, err := net.ParseMAC(fl.Field().String()) return err == nil } -// IsCIDRv4 is the validation function for validating if the field's value is a valid v4 CIDR address. +// isCIDRv4 is the validation function for validating if the field's value is a valid v4 CIDR address. func isCIDRv4(fl FieldLevel) bool { - ip, _, err := net.ParseCIDR(fl.Field().String()) return err == nil && ip.To4() != nil } -// IsCIDRv6 is the validation function for validating if the field's value is a valid v6 CIDR address. +// isCIDRv6 is the validation function for validating if the field's value is a valid v6 CIDR address. func isCIDRv6(fl FieldLevel) bool { - ip, _, err := net.ParseCIDR(fl.Field().String()) return err == nil && ip.To4() == nil } -// IsCIDR is the validation function for validating if the field's value is a valid v4 or v6 CIDR address. +// isCIDR is the validation function for validating if the field's value is a valid v4 or v6 CIDR address. func isCIDR(fl FieldLevel) bool { - _, _, err := net.ParseCIDR(fl.Field().String()) return err == nil } -// IsIPv4 is the validation function for validating if a value is a valid v4 IP address. +// isIPv4 is the validation function for validating if a value is a valid v4 IP address. func isIPv4(fl FieldLevel) bool { - ip := net.ParseIP(fl.Field().String()) return ip != nil && ip.To4() != nil } -// IsIPv6 is the validation function for validating if the field's value is a valid v6 IP address. +// isIPv6 is the validation function for validating if the field's value is a valid v6 IP address. func isIPv6(fl FieldLevel) bool { - ip := net.ParseIP(fl.Field().String()) return ip != nil && ip.To4() == nil } -// IsIP is the validation function for validating if the field's value is a valid v4 or v6 IP address. +// isIP is the validation function for validating if the field's value is a valid v4 or v6 IP address. func isIP(fl FieldLevel) bool { - ip := net.ParseIP(fl.Field().String()) return ip != nil } -// IsSSN is the validation function for validating if the field's value is a valid SSN. +// isSSN is the validation function for validating if the field's value is a valid SSN. 
func isSSN(fl FieldLevel) bool { - field := fl.Field() if field.Len() != 11 { @@ -364,7 +383,7 @@ func isSSN(fl FieldLevel) bool { return sSNRegex.MatchString(field.String()) } -// IsLongitude is the validation function for validating if the field's value is a valid longitude coordinate. +// isLongitude is the validation function for validating if the field's value is a valid longitude coordinate. func isLongitude(fl FieldLevel) bool { field := fl.Field() @@ -387,7 +406,7 @@ func isLongitude(fl FieldLevel) bool { return longitudeRegex.MatchString(v) } -// IsLatitude is the validation function for validating if the field's value is a valid latitude coordinate. +// isLatitude is the validation function for validating if the field's value is a valid latitude coordinate. func isLatitude(fl FieldLevel) bool { field := fl.Field() @@ -410,9 +429,8 @@ func isLatitude(fl FieldLevel) bool { return latitudeRegex.MatchString(v) } -// IsDataURI is the validation function for validating if the field's value is a valid data URI. +// isDataURI is the validation function for validating if the field's value is a valid data URI. func isDataURI(fl FieldLevel) bool { - uri := strings.SplitN(fl.Field().String(), ",", 2) if len(uri) != 2 { @@ -426,9 +444,8 @@ func isDataURI(fl FieldLevel) bool { return base64Regex.MatchString(uri[1]) } -// HasMultiByteCharacter is the validation function for validating if the field's value has a multi byte character. +// hasMultiByteCharacter is the validation function for validating if the field's value has a multi byte character. func hasMultiByteCharacter(fl FieldLevel) bool { - field := fl.Field() if field.Len() == 0 { @@ -438,64 +455,118 @@ func hasMultiByteCharacter(fl FieldLevel) bool { return multibyteRegex.MatchString(field.String()) } -// IsPrintableASCII is the validation function for validating if the field's value is a valid printable ASCII character. +// isPrintableASCII is the validation function for validating if the field's value is a valid printable ASCII character. func isPrintableASCII(fl FieldLevel) bool { return printableASCIIRegex.MatchString(fl.Field().String()) } -// IsASCII is the validation function for validating if the field's value is a valid ASCII character. +// isASCII is the validation function for validating if the field's value is a valid ASCII character. func isASCII(fl FieldLevel) bool { return aSCIIRegex.MatchString(fl.Field().String()) } -// IsUUID5 is the validation function for validating if the field's value is a valid v5 UUID. +// isUUID5 is the validation function for validating if the field's value is a valid v5 UUID. func isUUID5(fl FieldLevel) bool { return uUID5Regex.MatchString(fl.Field().String()) } -// IsUUID4 is the validation function for validating if the field's value is a valid v4 UUID. +// isUUID4 is the validation function for validating if the field's value is a valid v4 UUID. func isUUID4(fl FieldLevel) bool { return uUID4Regex.MatchString(fl.Field().String()) } -// IsUUID3 is the validation function for validating if the field's value is a valid v3 UUID. +// isUUID3 is the validation function for validating if the field's value is a valid v3 UUID. func isUUID3(fl FieldLevel) bool { return uUID3Regex.MatchString(fl.Field().String()) } -// IsUUID is the validation function for validating if the field's value is a valid UUID of any version. +// isUUID is the validation function for validating if the field's value is a valid UUID of any version. 
func isUUID(fl FieldLevel) bool { return uUIDRegex.MatchString(fl.Field().String()) } -// IsUUID5RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v5 UUID. +// isUUID5RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v5 UUID. func isUUID5RFC4122(fl FieldLevel) bool { return uUID5RFC4122Regex.MatchString(fl.Field().String()) } -// IsUUID4RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v4 UUID. +// isUUID4RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v4 UUID. func isUUID4RFC4122(fl FieldLevel) bool { return uUID4RFC4122Regex.MatchString(fl.Field().String()) } -// IsUUID3RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v3 UUID. +// isUUID3RFC4122 is the validation function for validating if the field's value is a valid RFC4122 v3 UUID. func isUUID3RFC4122(fl FieldLevel) bool { return uUID3RFC4122Regex.MatchString(fl.Field().String()) } -// IsUUIDRFC4122 is the validation function for validating if the field's value is a valid RFC4122 UUID of any version. +// isUUIDRFC4122 is the validation function for validating if the field's value is a valid RFC4122 UUID of any version. func isUUIDRFC4122(fl FieldLevel) bool { return uUIDRFC4122Regex.MatchString(fl.Field().String()) } -// IsISBN is the validation function for validating if the field's value is a valid v10 or v13 ISBN. +// isULID is the validation function for validating if the field's value is a valid ULID. +func isULID(fl FieldLevel) bool { + return uLIDRegex.MatchString(fl.Field().String()) +} + +// isMD4 is the validation function for validating if the field's value is a valid MD4. +func isMD4(fl FieldLevel) bool { + return md4Regex.MatchString(fl.Field().String()) +} + +// isMD5 is the validation function for validating if the field's value is a valid MD5. +func isMD5(fl FieldLevel) bool { + return md5Regex.MatchString(fl.Field().String()) +} + +// isSHA256 is the validation function for validating if the field's value is a valid SHA256. +func isSHA256(fl FieldLevel) bool { + return sha256Regex.MatchString(fl.Field().String()) +} + +// isSHA384 is the validation function for validating if the field's value is a valid SHA384. +func isSHA384(fl FieldLevel) bool { + return sha384Regex.MatchString(fl.Field().String()) +} + +// isSHA512 is the validation function for validating if the field's value is a valid SHA512. +func isSHA512(fl FieldLevel) bool { + return sha512Regex.MatchString(fl.Field().String()) +} + +// isRIPEMD128 is the validation function for validating if the field's value is a valid PIPEMD128. +func isRIPEMD128(fl FieldLevel) bool { + return ripemd128Regex.MatchString(fl.Field().String()) +} + +// isRIPEMD160 is the validation function for validating if the field's value is a valid PIPEMD160. +func isRIPEMD160(fl FieldLevel) bool { + return ripemd160Regex.MatchString(fl.Field().String()) +} + +// isTIGER128 is the validation function for validating if the field's value is a valid TIGER128. +func isTIGER128(fl FieldLevel) bool { + return tiger128Regex.MatchString(fl.Field().String()) +} + +// isTIGER160 is the validation function for validating if the field's value is a valid TIGER160. +func isTIGER160(fl FieldLevel) bool { + return tiger160Regex.MatchString(fl.Field().String()) +} + +// isTIGER192 is the validation function for validating if the field's value is a valid isTIGER192. 
+func isTIGER192(fl FieldLevel) bool { + return tiger192Regex.MatchString(fl.Field().String()) +} + +// isISBN is the validation function for validating if the field's value is a valid v10 or v13 ISBN. func isISBN(fl FieldLevel) bool { return isISBN10(fl) || isISBN13(fl) } -// IsISBN13 is the validation function for validating if the field's value is a valid v13 ISBN. +// isISBN13 is the validation function for validating if the field's value is a valid v13 ISBN. func isISBN13(fl FieldLevel) bool { - s := strings.Replace(strings.Replace(fl.Field().String(), "-", "", 4), " ", "", 4) if !iSBN13Regex.MatchString(s) { @@ -514,9 +585,8 @@ func isISBN13(fl FieldLevel) bool { return (int32(s[12]-'0'))-((10-(checksum%10))%10) == 0 } -// IsISBN10 is the validation function for validating if the field's value is a valid v10 ISBN. +// isISBN10 is the validation function for validating if the field's value is a valid v10 ISBN. func isISBN10(fl FieldLevel) bool { - s := strings.Replace(strings.Replace(fl.Field().String(), "-", "", 3), " ", "", 3) if !iSBN10Regex.MatchString(s) { @@ -539,7 +609,7 @@ func isISBN10(fl FieldLevel) bool { return checksum%11 == 0 } -// IsEthereumAddress is the validation function for validating if the field's value is a valid Ethereum address. +// isEthereumAddress is the validation function for validating if the field's value is a valid Ethereum address. func isEthereumAddress(fl FieldLevel) bool { address := fl.Field().String() @@ -547,7 +617,7 @@ func isEthereumAddress(fl FieldLevel) bool { return false } - if ethaddressRegexUpper.MatchString(address) || ethAddressRegexLower.MatchString(address) { + if ethAddressRegexUpper.MatchString(address) || ethAddressRegexLower.MatchString(address) { return true } @@ -570,7 +640,7 @@ func isEthereumAddress(fl FieldLevel) bool { return true } -// IsBitcoinAddress is the validation function for validating if the field's value is a valid btc address +// isBitcoinAddress is the validation function for validating if the field's value is a valid btc address func isBitcoinAddress(fl FieldLevel) bool { address := fl.Field().String() @@ -607,7 +677,7 @@ func isBitcoinAddress(fl FieldLevel) bool { return validchecksum == computedchecksum } -// IsBitcoinBech32Address is the validation function for validating if the field's value is a valid bech32 btc address +// isBitcoinBech32Address is the validation function for validating if the field's value is a valid bech32 btc address func isBitcoinBech32Address(fl FieldLevel) bool { address := fl.Field().String() @@ -687,60 +757,59 @@ func isBitcoinBech32Address(fl FieldLevel) bool { return true } -// ExcludesRune is the validation function for validating that the field's value does not contain the rune specified within the param. +// excludesRune is the validation function for validating that the field's value does not contain the rune specified within the param. func excludesRune(fl FieldLevel) bool { return !containsRune(fl) } -// ExcludesAll is the validation function for validating that the field's value does not contain any of the characters specified within the param. +// excludesAll is the validation function for validating that the field's value does not contain any of the characters specified within the param. func excludesAll(fl FieldLevel) bool { return !containsAny(fl) } -// Excludes is the validation function for validating that the field's value does not contain the text specified within the param. 
+// excludes is the validation function for validating that the field's value does not contain the text specified within the param. func excludes(fl FieldLevel) bool { return !contains(fl) } -// ContainsRune is the validation function for validating that the field's value contains the rune specified within the param. +// containsRune is the validation function for validating that the field's value contains the rune specified within the param. func containsRune(fl FieldLevel) bool { - r, _ := utf8.DecodeRuneInString(fl.Param()) return strings.ContainsRune(fl.Field().String(), r) } -// ContainsAny is the validation function for validating that the field's value contains any of the characters specified within the param. +// containsAny is the validation function for validating that the field's value contains any of the characters specified within the param. func containsAny(fl FieldLevel) bool { return strings.ContainsAny(fl.Field().String(), fl.Param()) } -// Contains is the validation function for validating that the field's value contains the text specified within the param. +// contains is the validation function for validating that the field's value contains the text specified within the param. func contains(fl FieldLevel) bool { return strings.Contains(fl.Field().String(), fl.Param()) } -// StartsWith is the validation function for validating that the field's value starts with the text specified within the param. +// startsWith is the validation function for validating that the field's value starts with the text specified within the param. func startsWith(fl FieldLevel) bool { return strings.HasPrefix(fl.Field().String(), fl.Param()) } -// EndsWith is the validation function for validating that the field's value ends with the text specified within the param. +// endsWith is the validation function for validating that the field's value ends with the text specified within the param. func endsWith(fl FieldLevel) bool { return strings.HasSuffix(fl.Field().String(), fl.Param()) } -// StartsNotWith is the validation function for validating that the field's value does not start with the text specified within the param. +// startsNotWith is the validation function for validating that the field's value does not start with the text specified within the param. func startsNotWith(fl FieldLevel) bool { return !startsWith(fl) } -// EndsNotWith is the validation function for validating that the field's value does not end with the text specified within the param. +// endsNotWith is the validation function for validating that the field's value does not end with the text specified within the param. func endsNotWith(fl FieldLevel) bool { return !endsWith(fl) } -// FieldContains is the validation function for validating if the current field's value contains the field specified by the param's value. +// fieldContains is the validation function for validating if the current field's value contains the field specified by the param's value. func fieldContains(fl FieldLevel) bool { field := fl.Field() @@ -753,7 +822,7 @@ func fieldContains(fl FieldLevel) bool { return strings.Contains(field.String(), currentField.String()) } -// FieldExcludes is the validation function for validating if the current field's value excludes the field specified by the param's value. +// fieldExcludes is the validation function for validating if the current field's value excludes the field specified by the param's value. 
func fieldExcludes(fl FieldLevel) bool { field := fl.Field() @@ -765,9 +834,8 @@ func fieldExcludes(fl FieldLevel) bool { return !strings.Contains(field.String(), currentField.String()) } -// IsNeField is the validation function for validating if the current field's value is not equal to the field specified by the param's value. +// isNeField is the validation function for validating if the current field's value is not equal to the field specified by the param's value. func isNeField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -791,16 +859,14 @@ func isNeField(fl FieldLevel) bool { case reflect.Slice, reflect.Map, reflect.Array: return int64(field.Len()) != int64(currentField.Len()) + case reflect.Bool: + return field.Bool() != currentField.Bool() + case reflect.Struct: fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != currentField.Type() { - return true - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) { t := currentField.Interface().(time.Time) fieldTime := field.Interface().(time.Time) @@ -808,20 +874,23 @@ func isNeField(fl FieldLevel) bool { return !fieldTime.Equal(t) } + // Not Same underlying type i.e. struct and time + if fieldType != currentField.Type() { + return true + } } // default reflect.String: return field.String() != currentField.String() } -// IsNe is the validation function for validating that the field's value does not equal the provided param value. +// isNe is the validation function for validating that the field's value does not equal the provided param value. func isNe(fl FieldLevel) bool { return !isEq(fl) } -// IsLteCrossStructField is the validation function for validating if the current field's value is less than or equal to the field, within a separate struct, specified by the param's value. +// isLteCrossStructField is the validation function for validating if the current field's value is less than or equal to the field, within a separate struct, specified by the param's value. func isLteCrossStructField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -848,28 +917,27 @@ func isLteCrossStructField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != topField.Type() { - return false - } + if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) { - if fieldType == timeType { - - fieldTime := field.Interface().(time.Time) - topTime := topField.Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) + topTime := topField.Convert(timeType).Interface().(time.Time) return fieldTime.Before(topTime) || fieldTime.Equal(topTime) } + + // Not Same underlying type i.e. struct and time + if fieldType != topField.Type() { + return false + } } // default reflect.String: return field.String() <= topField.String() } -// IsLtCrossStructField is the validation function for validating if the current field's value is less than the field, within a separate struct, specified by the param's value. +// isLtCrossStructField is the validation function for validating if the current field's value is less than the field, within a separate struct, specified by the param's value. // NOTE: This is exposed for use within your own custom functions and not intended to be called directly. 
func isLtCrossStructField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -896,27 +964,26 @@ func isLtCrossStructField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != topField.Type() { - return false - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) { - fieldTime := field.Interface().(time.Time) - topTime := topField.Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) + topTime := topField.Convert(timeType).Interface().(time.Time) return fieldTime.Before(topTime) } + + // Not Same underlying type i.e. struct and time + if fieldType != topField.Type() { + return false + } } // default reflect.String: return field.String() < topField.String() } -// IsGteCrossStructField is the validation function for validating if the current field's value is greater than or equal to the field, within a separate struct, specified by the param's value. +// isGteCrossStructField is the validation function for validating if the current field's value is greater than or equal to the field, within a separate struct, specified by the param's value. func isGteCrossStructField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -943,27 +1010,26 @@ func isGteCrossStructField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != topField.Type() { - return false - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) { - fieldTime := field.Interface().(time.Time) - topTime := topField.Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) + topTime := topField.Convert(timeType).Interface().(time.Time) return fieldTime.After(topTime) || fieldTime.Equal(topTime) } + + // Not Same underlying type i.e. struct and time + if fieldType != topField.Type() { + return false + } } // default reflect.String: return field.String() >= topField.String() } -// IsGtCrossStructField is the validation function for validating if the current field's value is greater than the field, within a separate struct, specified by the param's value. +// isGtCrossStructField is the validation function for validating if the current field's value is greater than the field, within a separate struct, specified by the param's value. func isGtCrossStructField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -990,27 +1056,26 @@ func isGtCrossStructField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != topField.Type() { - return false - } + if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) { - if fieldType == timeType { - - fieldTime := field.Interface().(time.Time) - topTime := topField.Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) + topTime := topField.Convert(timeType).Interface().(time.Time) return fieldTime.After(topTime) } + + // Not Same underlying type i.e. struct and time + if fieldType != topField.Type() { + return false + } } // default reflect.String: return field.String() > topField.String() } -// IsNeCrossStructField is the validation function for validating that the current field's value is not equal to the field, within a separate struct, specified by the param's value. 
+// isNeCrossStructField is the validation function for validating that the current field's value is not equal to the field, within a separate struct, specified by the param's value. func isNeCrossStructField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -1033,31 +1098,33 @@ func isNeCrossStructField(fl FieldLevel) bool { case reflect.Slice, reflect.Map, reflect.Array: return int64(topField.Len()) != int64(field.Len()) + case reflect.Bool: + return topField.Bool() != field.Bool() + case reflect.Struct: fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != topField.Type() { - return true - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) { - t := field.Interface().(time.Time) - fieldTime := topField.Interface().(time.Time) + t := field.Convert(timeType).Interface().(time.Time) + fieldTime := topField.Convert(timeType).Interface().(time.Time) return !fieldTime.Equal(t) } + + // Not Same underlying type i.e. struct and time + if fieldType != topField.Type() { + return true + } } // default reflect.String: return topField.String() != field.String() } -// IsEqCrossStructField is the validation function for validating that the current field's value is equal to the field, within a separate struct, specified by the param's value. +// isEqCrossStructField is the validation function for validating that the current field's value is equal to the field, within a separate struct, specified by the param's value. func isEqCrossStructField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -1080,31 +1147,33 @@ func isEqCrossStructField(fl FieldLevel) bool { case reflect.Slice, reflect.Map, reflect.Array: return int64(topField.Len()) == int64(field.Len()) + case reflect.Bool: + return topField.Bool() == field.Bool() + case reflect.Struct: fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != topField.Type() { - return false - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) { - t := field.Interface().(time.Time) - fieldTime := topField.Interface().(time.Time) + t := field.Convert(timeType).Interface().(time.Time) + fieldTime := topField.Convert(timeType).Interface().(time.Time) return fieldTime.Equal(t) } + + // Not Same underlying type i.e. struct and time + if fieldType != topField.Type() { + return false + } } // default reflect.String: return topField.String() == field.String() } -// IsEqField is the validation function for validating if the current field's value is equal to the field specified by the param's value. +// isEqField is the validation function for validating if the current field's value is equal to the field specified by the param's value. func isEqField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -1127,32 +1196,33 @@ func isEqField(fl FieldLevel) bool { case reflect.Slice, reflect.Map, reflect.Array: return int64(field.Len()) == int64(currentField.Len()) + case reflect.Bool: + return field.Bool() == currentField.Bool() + case reflect.Struct: fieldType := field.Type() - // Not Same underlying type i.e. 
struct and time - if fieldType != currentField.Type() { - return false - } + if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) { - if fieldType == timeType { - - t := currentField.Interface().(time.Time) - fieldTime := field.Interface().(time.Time) + t := currentField.Convert(timeType).Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) return fieldTime.Equal(t) } + // Not Same underlying type i.e. struct and time + if fieldType != currentField.Type() { + return false + } } // default reflect.String: return field.String() == currentField.String() } -// IsEq is the validation function for validating if the current field's value is equal to the param's value. +// isEq is the validation function for validating if the current field's value is equal to the param's value. func isEq(fl FieldLevel) bool { - field := fl.Field() param := fl.Param() @@ -1190,23 +1260,62 @@ func isEq(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// IsBase64 is the validation function for validating if the current field's value is a valid base 64. +// isPostcodeByIso3166Alpha2 validates by value which is country code in iso 3166 alpha 2 +// example: `postcode_iso3166_alpha2=US` +func isPostcodeByIso3166Alpha2(fl FieldLevel) bool { + field := fl.Field() + param := fl.Param() + + reg, found := postCodeRegexDict[param] + if !found { + return false + } + + return reg.MatchString(field.String()) +} + +// isPostcodeByIso3166Alpha2Field validates by field which represents for a value of country code in iso 3166 alpha 2 +// example: `postcode_iso3166_alpha2_field=CountryCode` +func isPostcodeByIso3166Alpha2Field(fl FieldLevel) bool { + field := fl.Field() + params := parseOneOfParam2(fl.Param()) + + if len(params) != 1 { + return false + } + + currentField, kind, _, found := fl.GetStructFieldOKAdvanced2(fl.Parent(), params[0]) + if !found { + return false + } + + if kind != reflect.String { + panic(fmt.Sprintf("Bad field type %T", currentField.Interface())) + } + + reg, found := postCodeRegexDict[currentField.String()] + if !found { + return false + } + + return reg.MatchString(field.String()) +} + +// isBase64 is the validation function for validating if the current field's value is a valid base 64. func isBase64(fl FieldLevel) bool { return base64Regex.MatchString(fl.Field().String()) } -// IsBase64URL is the validation function for validating if the current field's value is a valid base64 URL safe string. +// isBase64URL is the validation function for validating if the current field's value is a valid base64 URL safe string. func isBase64URL(fl FieldLevel) bool { return base64URLRegex.MatchString(fl.Field().String()) } -// IsURI is the validation function for validating if the current field's value is a valid URI. +// isURI is the validation function for validating if the current field's value is a valid URI. func isURI(fl FieldLevel) bool { - field := fl.Field() switch field.Kind() { - case reflect.String: s := field.String() @@ -1229,13 +1338,11 @@ func isURI(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// IsURL is the validation function for validating if the current field's value is a valid URL. +// isURL is the validation function for validating if the current field's value is a valid URL. 
func isURL(fl FieldLevel) bool { - field := fl.Field() switch field.Kind() { - case reflect.String: var i int @@ -1268,7 +1375,6 @@ func isUrnRFC2141(fl FieldLevel) bool { field := fl.Field() switch field.Kind() { - case reflect.String: str := field.String() @@ -1281,7 +1387,7 @@ func isUrnRFC2141(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// IsFile is the validation function for validating if the current field's value is a valid file path. +// isFile is the validation function for validating if the current field's value is a valid file path. func isFile(fl FieldLevel) bool { field := fl.Field() @@ -1298,47 +1404,47 @@ func isFile(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// IsE164 is the validation function for validating if the current field's value is a valid e.164 formatted phone number. +// isE164 is the validation function for validating if the current field's value is a valid e.164 formatted phone number. func isE164(fl FieldLevel) bool { return e164Regex.MatchString(fl.Field().String()) } -// IsEmail is the validation function for validating if the current field's value is a valid email address. +// isEmail is the validation function for validating if the current field's value is a valid email address. func isEmail(fl FieldLevel) bool { return emailRegex.MatchString(fl.Field().String()) } -// IsHSLA is the validation function for validating if the current field's value is a valid HSLA color. +// isHSLA is the validation function for validating if the current field's value is a valid HSLA color. func isHSLA(fl FieldLevel) bool { return hslaRegex.MatchString(fl.Field().String()) } -// IsHSL is the validation function for validating if the current field's value is a valid HSL color. +// isHSL is the validation function for validating if the current field's value is a valid HSL color. func isHSL(fl FieldLevel) bool { return hslRegex.MatchString(fl.Field().String()) } -// IsRGBA is the validation function for validating if the current field's value is a valid RGBA color. +// isRGBA is the validation function for validating if the current field's value is a valid RGBA color. func isRGBA(fl FieldLevel) bool { return rgbaRegex.MatchString(fl.Field().String()) } -// IsRGB is the validation function for validating if the current field's value is a valid RGB color. +// isRGB is the validation function for validating if the current field's value is a valid RGB color. func isRGB(fl FieldLevel) bool { return rgbRegex.MatchString(fl.Field().String()) } -// IsHEXColor is the validation function for validating if the current field's value is a valid HEX color. +// isHEXColor is the validation function for validating if the current field's value is a valid HEX color. func isHEXColor(fl FieldLevel) bool { - return hexcolorRegex.MatchString(fl.Field().String()) + return hexColorRegex.MatchString(fl.Field().String()) } -// IsHexadecimal is the validation function for validating if the current field's value is a valid hexadecimal. +// isHexadecimal is the validation function for validating if the current field's value is a valid hexadecimal. func isHexadecimal(fl FieldLevel) bool { return hexadecimalRegex.MatchString(fl.Field().String()) } -// IsNumber is the validation function for validating if the current field's value is a valid number. +// isNumber is the validation function for validating if the current field's value is a valid number. 
func isNumber(fl FieldLevel) bool { switch fl.Field().Kind() { case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64: @@ -1348,7 +1454,7 @@ func isNumber(fl FieldLevel) bool { } } -// IsNumeric is the validation function for validating if the current field's value is a valid numeric value. +// isNumeric is the validation function for validating if the current field's value is a valid numeric value. func isNumeric(fl FieldLevel) bool { switch fl.Field().Kind() { case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64: @@ -1358,32 +1464,43 @@ func isNumeric(fl FieldLevel) bool { } } -// IsAlphanum is the validation function for validating if the current field's value is a valid alphanumeric value. +// isAlphanum is the validation function for validating if the current field's value is a valid alphanumeric value. func isAlphanum(fl FieldLevel) bool { return alphaNumericRegex.MatchString(fl.Field().String()) } -// IsAlpha is the validation function for validating if the current field's value is a valid alpha value. +// isAlpha is the validation function for validating if the current field's value is a valid alpha value. func isAlpha(fl FieldLevel) bool { return alphaRegex.MatchString(fl.Field().String()) } -// IsAlphanumUnicode is the validation function for validating if the current field's value is a valid alphanumeric unicode value. +// isAlphanumUnicode is the validation function for validating if the current field's value is a valid alphanumeric unicode value. func isAlphanumUnicode(fl FieldLevel) bool { return alphaUnicodeNumericRegex.MatchString(fl.Field().String()) } -// IsAlphaUnicode is the validation function for validating if the current field's value is a valid alpha unicode value. +// isAlphaUnicode is the validation function for validating if the current field's value is a valid alpha unicode value. func isAlphaUnicode(fl FieldLevel) bool { return alphaUnicodeRegex.MatchString(fl.Field().String()) } +// isBoolean is the validation function for validating if the current field's value is a valid boolean value or can be safely converted to a boolean value. +func isBoolean(fl FieldLevel) bool { + switch fl.Field().Kind() { + case reflect.Bool: + return true + default: + _, err := strconv.ParseBool(fl.Field().String()) + return err == nil + } +} + // isDefault is the opposite of required aka hasValue func isDefault(fl FieldLevel) bool { return !hasValue(fl) } -// HasValue is the validation function for validating if the current field's value is not the default static value. +// hasValue is the validation function for validating if the current field's value is not the default static value. 
func hasValue(fl FieldLevel) bool { field := fl.Field() switch field.Kind() { @@ -1441,6 +1558,9 @@ func requireCheckFieldValue(fl FieldLevel, param string, value string, defaultNo case reflect.Slice, reflect.Map, reflect.Array: return int64(field.Len()) == asInt(value) + + case reflect.Bool: + return field.Bool() == asBool(value) } // default reflect.String: @@ -1462,6 +1582,22 @@ func requiredIf(fl FieldLevel) bool { return hasValue(fl) } +// excludedIf is the validation function +// The field under validation must not be present or is empty only if all the other specified fields are equal to the value following with the specified field. +func excludedIf(fl FieldLevel) bool { + params := parseOneOfParam2(fl.Param()) + if len(params)%2 != 0 { + panic(fmt.Sprintf("Bad param number for excluded_if %s", fl.FieldName())) + } + + for i := 0; i < len(params); i += 2 { + if !requireCheckFieldValue(fl, params[i], params[i+1], false) { + return false + } + } + return true +} + // requiredUnless is the validation function // The field under validation must be present and not empty only unless all the other specified fields are equal to the value following with the specified field. func requiredUnless(fl FieldLevel) bool { @@ -1478,7 +1614,22 @@ func requiredUnless(fl FieldLevel) bool { return hasValue(fl) } -// ExcludedWith is the validation function +// excludedUnless is the validation function +// The field under validation must not be present or is empty unless all the other specified fields are equal to the value following with the specified field. +func excludedUnless(fl FieldLevel) bool { + params := parseOneOfParam2(fl.Param()) + if len(params)%2 != 0 { + panic(fmt.Sprintf("Bad param number for excluded_unless %s", fl.FieldName())) + } + for i := 0; i < len(params); i += 2 { + if !requireCheckFieldValue(fl, params[i], params[i+1], false) { + return true + } + } + return !hasValue(fl) +} + +// excludedWith is the validation function // The field under validation must not be present or is empty if any of the other specified fields are present. func excludedWith(fl FieldLevel) bool { params := parseOneOfParam2(fl.Param()) @@ -1490,7 +1641,7 @@ func excludedWith(fl FieldLevel) bool { return true } -// RequiredWith is the validation function +// requiredWith is the validation function // The field under validation must be present and not empty only if any of the other specified fields are present. func requiredWith(fl FieldLevel) bool { params := parseOneOfParam2(fl.Param()) @@ -1502,7 +1653,7 @@ func requiredWith(fl FieldLevel) bool { return true } -// ExcludedWithAll is the validation function +// excludedWithAll is the validation function // The field under validation must not be present or is empty if all of the other specified fields are present. func excludedWithAll(fl FieldLevel) bool { params := parseOneOfParam2(fl.Param()) @@ -1514,7 +1665,7 @@ func excludedWithAll(fl FieldLevel) bool { return !hasValue(fl) } -// RequiredWithAll is the validation function +// requiredWithAll is the validation function // The field under validation must be present and not empty only if all of the other specified fields are present. func requiredWithAll(fl FieldLevel) bool { params := parseOneOfParam2(fl.Param()) @@ -1526,7 +1677,7 @@ func requiredWithAll(fl FieldLevel) bool { return hasValue(fl) } -// ExcludedWithout is the validation function +// excludedWithout is the validation function // The field under validation must not be present or is empty when any of the other specified fields are not present. 
func excludedWithout(fl FieldLevel) bool { if requireCheckFieldKind(fl, strings.TrimSpace(fl.Param()), true) { @@ -1535,7 +1686,7 @@ func excludedWithout(fl FieldLevel) bool { return true } -// RequiredWithout is the validation function +// requiredWithout is the validation function // The field under validation must be present and not empty only when any of the other specified fields are not present. func requiredWithout(fl FieldLevel) bool { if requireCheckFieldKind(fl, strings.TrimSpace(fl.Param()), true) { @@ -1544,7 +1695,7 @@ func requiredWithout(fl FieldLevel) bool { return true } -// RequiredWithoutAll is the validation function +// excludedWithoutAll is the validation function // The field under validation must not be present or is empty when all of the other specified fields are not present. func excludedWithoutAll(fl FieldLevel) bool { params := parseOneOfParam2(fl.Param()) @@ -1556,7 +1707,7 @@ func excludedWithoutAll(fl FieldLevel) bool { return !hasValue(fl) } -// RequiredWithoutAll is the validation function +// requiredWithoutAll is the validation function // The field under validation must be present and not empty only when all of the other specified fields are not present. func requiredWithoutAll(fl FieldLevel) bool { params := parseOneOfParam2(fl.Param()) @@ -1568,9 +1719,8 @@ func requiredWithoutAll(fl FieldLevel) bool { return hasValue(fl) } -// IsGteField is the validation function for validating if the current field's value is greater than or equal to the field specified by the param's value. +// isGteField is the validation function for validating if the current field's value is greater than or equal to the field specified by the param's value. func isGteField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -1597,27 +1747,26 @@ func isGteField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != currentField.Type() { - return false - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) { - t := currentField.Interface().(time.Time) - fieldTime := field.Interface().(time.Time) + t := currentField.Convert(timeType).Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) return fieldTime.After(t) || fieldTime.Equal(t) } + + // Not Same underlying type i.e. struct and time + if fieldType != currentField.Type() { + return false + } } // default reflect.String return len(field.String()) >= len(currentField.String()) } -// IsGtField is the validation function for validating if the current field's value is greater than the field specified by the param's value. +// isGtField is the validation function for validating if the current field's value is greater than the field specified by the param's value. func isGtField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -1644,27 +1793,26 @@ func isGtField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != currentField.Type() { - return false - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) { - t := currentField.Interface().(time.Time) - fieldTime := field.Interface().(time.Time) + t := currentField.Convert(timeType).Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) return fieldTime.After(t) } + + // Not Same underlying type i.e. 
struct and time + if fieldType != currentField.Type() { + return false + } } // default reflect.String return len(field.String()) > len(currentField.String()) } -// IsGte is the validation function for validating if the current field's value is greater than or equal to the param's value. +// isGte is the validation function for validating if the current field's value is greater than or equal to the param's value. func isGte(fl FieldLevel) bool { - field := fl.Field() param := fl.Param() @@ -1697,10 +1845,10 @@ func isGte(fl FieldLevel) bool { case reflect.Struct: - if field.Type() == timeType { + if field.Type().ConvertibleTo(timeType) { now := time.Now().UTC() - t := field.Interface().(time.Time) + t := field.Convert(timeType).Interface().(time.Time) return t.After(now) || t.Equal(now) } @@ -1709,9 +1857,8 @@ func isGte(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// IsGt is the validation function for validating if the current field's value is greater than the param's value. +// isGt is the validation function for validating if the current field's value is greater than the param's value. func isGt(fl FieldLevel) bool { - field := fl.Field() param := fl.Param() @@ -1743,18 +1890,17 @@ func isGt(fl FieldLevel) bool { return field.Float() > p case reflect.Struct: - if field.Type() == timeType { + if field.Type().ConvertibleTo(timeType) { - return field.Interface().(time.Time).After(time.Now().UTC()) + return field.Convert(timeType).Interface().(time.Time).After(time.Now().UTC()) } } panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// HasLengthOf is the validation function for validating if the current field's value is equal to the param's value. +// hasLengthOf is the validation function for validating if the current field's value is equal to the param's value. func hasLengthOf(fl FieldLevel) bool { - field := fl.Field() param := fl.Param() @@ -1789,14 +1935,13 @@ func hasLengthOf(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// HasMinOf is the validation function for validating if the current field's value is greater than or equal to the param's value. +// hasMinOf is the validation function for validating if the current field's value is greater than or equal to the param's value. func hasMinOf(fl FieldLevel) bool { return isGte(fl) } -// IsLteField is the validation function for validating if the current field's value is less than or equal to the field specified by the param's value. +// isLteField is the validation function for validating if the current field's value is less than or equal to the field specified by the param's value. func isLteField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -1823,27 +1968,26 @@ func isLteField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != currentField.Type() { - return false - } - - if fieldType == timeType { + if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) { - t := currentField.Interface().(time.Time) - fieldTime := field.Interface().(time.Time) + t := currentField.Convert(timeType).Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) return fieldTime.Before(t) || fieldTime.Equal(t) } + + // Not Same underlying type i.e. 
struct and time + if fieldType != currentField.Type() { + return false + } } // default reflect.String return len(field.String()) <= len(currentField.String()) } -// IsLtField is the validation function for validating if the current field's value is less than the field specified by the param's value. +// isLtField is the validation function for validating if the current field's value is less than the field specified by the param's value. func isLtField(fl FieldLevel) bool { - field := fl.Field() kind := field.Kind() @@ -1870,27 +2014,26 @@ func isLtField(fl FieldLevel) bool { fieldType := field.Type() - // Not Same underlying type i.e. struct and time - if fieldType != currentField.Type() { - return false - } + if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) { - if fieldType == timeType { - - t := currentField.Interface().(time.Time) - fieldTime := field.Interface().(time.Time) + t := currentField.Convert(timeType).Interface().(time.Time) + fieldTime := field.Convert(timeType).Interface().(time.Time) return fieldTime.Before(t) } + + // Not Same underlying type i.e. struct and time + if fieldType != currentField.Type() { + return false + } } // default reflect.String return len(field.String()) < len(currentField.String()) } -// IsLte is the validation function for validating if the current field's value is less than or equal to the param's value. +// isLte is the validation function for validating if the current field's value is less than or equal to the param's value. func isLte(fl FieldLevel) bool { - field := fl.Field() param := fl.Param() @@ -1923,10 +2066,10 @@ func isLte(fl FieldLevel) bool { case reflect.Struct: - if field.Type() == timeType { + if field.Type().ConvertibleTo(timeType) { now := time.Now().UTC() - t := field.Interface().(time.Time) + t := field.Convert(timeType).Interface().(time.Time) return t.Before(now) || t.Equal(now) } @@ -1935,9 +2078,8 @@ func isLte(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// IsLt is the validation function for validating if the current field's value is less than the param's value. +// isLt is the validation function for validating if the current field's value is less than the param's value. func isLt(fl FieldLevel) bool { - field := fl.Field() param := fl.Param() @@ -1970,23 +2112,22 @@ func isLt(fl FieldLevel) bool { case reflect.Struct: - if field.Type() == timeType { + if field.Type().ConvertibleTo(timeType) { - return field.Interface().(time.Time).Before(time.Now().UTC()) + return field.Convert(timeType).Interface().(time.Time).Before(time.Now().UTC()) } } panic(fmt.Sprintf("Bad field type %T", field.Interface())) } -// HasMaxOf is the validation function for validating if the current field's value is less than or equal to the param's value. +// hasMaxOf is the validation function for validating if the current field's value is less than or equal to the param's value. func hasMaxOf(fl FieldLevel) bool { return isLte(fl) } -// IsTCP4AddrResolvable is the validation function for validating if the field's value is a resolvable tcp4 address. +// isTCP4AddrResolvable is the validation function for validating if the field's value is a resolvable tcp4 address. func isTCP4AddrResolvable(fl FieldLevel) bool { - if !isIP4Addr(fl) { return false } @@ -1995,9 +2136,8 @@ func isTCP4AddrResolvable(fl FieldLevel) bool { return err == nil } -// IsTCP6AddrResolvable is the validation function for validating if the field's value is a resolvable tcp6 address. 
+// isTCP6AddrResolvable is the validation function for validating if the field's value is a resolvable tcp6 address. func isTCP6AddrResolvable(fl FieldLevel) bool { - if !isIP6Addr(fl) { return false } @@ -2007,9 +2147,8 @@ func isTCP6AddrResolvable(fl FieldLevel) bool { return err == nil } -// IsTCPAddrResolvable is the validation function for validating if the field's value is a resolvable tcp address. +// isTCPAddrResolvable is the validation function for validating if the field's value is a resolvable tcp address. func isTCPAddrResolvable(fl FieldLevel) bool { - if !isIP4Addr(fl) && !isIP6Addr(fl) { return false } @@ -2019,9 +2158,8 @@ func isTCPAddrResolvable(fl FieldLevel) bool { return err == nil } -// IsUDP4AddrResolvable is the validation function for validating if the field's value is a resolvable udp4 address. +// isUDP4AddrResolvable is the validation function for validating if the field's value is a resolvable udp4 address. func isUDP4AddrResolvable(fl FieldLevel) bool { - if !isIP4Addr(fl) { return false } @@ -2031,9 +2169,8 @@ func isUDP4AddrResolvable(fl FieldLevel) bool { return err == nil } -// IsUDP6AddrResolvable is the validation function for validating if the field's value is a resolvable udp6 address. +// isUDP6AddrResolvable is the validation function for validating if the field's value is a resolvable udp6 address. func isUDP6AddrResolvable(fl FieldLevel) bool { - if !isIP6Addr(fl) { return false } @@ -2043,9 +2180,8 @@ func isUDP6AddrResolvable(fl FieldLevel) bool { return err == nil } -// IsUDPAddrResolvable is the validation function for validating if the field's value is a resolvable udp address. +// isUDPAddrResolvable is the validation function for validating if the field's value is a resolvable udp address. func isUDPAddrResolvable(fl FieldLevel) bool { - if !isIP4Addr(fl) && !isIP6Addr(fl) { return false } @@ -2055,9 +2191,8 @@ func isUDPAddrResolvable(fl FieldLevel) bool { return err == nil } -// IsIP4AddrResolvable is the validation function for validating if the field's value is a resolvable ip4 address. +// isIP4AddrResolvable is the validation function for validating if the field's value is a resolvable ip4 address. func isIP4AddrResolvable(fl FieldLevel) bool { - if !isIPv4(fl) { return false } @@ -2067,9 +2202,8 @@ func isIP4AddrResolvable(fl FieldLevel) bool { return err == nil } -// IsIP6AddrResolvable is the validation function for validating if the field's value is a resolvable ip6 address. +// isIP6AddrResolvable is the validation function for validating if the field's value is a resolvable ip6 address. func isIP6AddrResolvable(fl FieldLevel) bool { - if !isIPv6(fl) { return false } @@ -2079,9 +2213,8 @@ func isIP6AddrResolvable(fl FieldLevel) bool { return err == nil } -// IsIPAddrResolvable is the validation function for validating if the field's value is a resolvable ip address. +// isIPAddrResolvable is the validation function for validating if the field's value is a resolvable ip address. func isIPAddrResolvable(fl FieldLevel) bool { - if !isIP(fl) { return false } @@ -2091,16 +2224,14 @@ func isIPAddrResolvable(fl FieldLevel) bool { return err == nil } -// IsUnixAddrResolvable is the validation function for validating if the field's value is a resolvable unix address. +// isUnixAddrResolvable is the validation function for validating if the field's value is a resolvable unix address. 
func isUnixAddrResolvable(fl FieldLevel) bool { - _, err := net.ResolveUnixAddr("unix", fl.Field().String()) return err == nil } func isIP4Addr(fl FieldLevel) bool { - val := fl.Field().String() if idx := strings.LastIndex(val, ":"); idx != -1 { @@ -2113,7 +2244,6 @@ func isIP4Addr(fl FieldLevel) bool { } func isIP6Addr(fl FieldLevel) bool { - val := fl.Field().String() if idx := strings.LastIndex(val, ":"); idx != -1 { @@ -2145,7 +2275,7 @@ func isFQDN(fl FieldLevel) bool { return fqdnRegexRFC1123.MatchString(val) } -// IsDir is the validation function for validating if the current field's value is a valid directory. +// isDir is the validation function for validating if the current field's value is a valid directory. func isDir(fl FieldLevel) bool { field := fl.Field() @@ -2173,6 +2303,11 @@ func isJSON(fl FieldLevel) bool { panic(fmt.Sprintf("Bad field type %T", field.Interface())) } +// isJWT is the validation function for validating if the current field's value is a valid JWT string. +func isJWT(fl FieldLevel) bool { + return jWTRegex.MatchString(fl.Field().String()) +} + // isHostnamePort validates a : combination for fields typically used for socket address. func isHostnamePort(fl FieldLevel) bool { val := fl.Field().String() @@ -2274,6 +2409,12 @@ func isIso3166AlphaNumeric(fl FieldLevel) bool { var code int switch field.Kind() { + case reflect.String: + i, err := strconv.Atoi(field.String()) + if err != nil { + return false + } + code = i % 1000 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: code = int(field.Int() % 1000) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: @@ -2283,3 +2424,103 @@ func isIso3166AlphaNumeric(fl FieldLevel) bool { } return iso3166_1_alpha_numeric[code] } + +// isIso31662 is the validation function for validating if the current field's value is a valid iso3166-2 code. +func isIso31662(fl FieldLevel) bool { + val := fl.Field().String() + return iso3166_2[val] +} + +// isIso4217 is the validation function for validating if the current field's value is a valid iso4217 currency code. +func isIso4217(fl FieldLevel) bool { + val := fl.Field().String() + return iso4217[val] +} + +// isIso4217Numeric is the validation function for validating if the current field's value is a valid iso4217 numeric currency code. 
+func isIso4217Numeric(fl FieldLevel) bool { + field := fl.Field() + + var code int + switch field.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + code = int(field.Int()) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + code = int(field.Uint()) + default: + panic(fmt.Sprintf("Bad field type %T", field.Interface())) + } + return iso4217_numeric[code] +} + +// isBCP47LanguageTag is the validation function for validating if the current field's value is a valid BCP 47 language tag, as parsed by language.Parse +func isBCP47LanguageTag(fl FieldLevel) bool { + field := fl.Field() + + if field.Kind() == reflect.String { + _, err := language.Parse(field.String()) + return err == nil + } + + panic(fmt.Sprintf("Bad field type %T", field.Interface())) +} + +// isIsoBicFormat is the validation function for validating if the current field's value is a valid Business Identifier Code (SWIFT code), defined in ISO 9362 +func isIsoBicFormat(fl FieldLevel) bool { + bicString := fl.Field().String() + + return bicRegex.MatchString(bicString) +} + +// isSemverFormat is the validation function for validating if the current field's value is a valid semver version, defined in Semantic Versioning 2.0.0 +func isSemverFormat(fl FieldLevel) bool { + semverString := fl.Field().String() + + return semverRegex.MatchString(semverString) +} + +// isDnsRFC1035LabelFormat is the validation function +// for validating if the current field's value is +// a valid dns RFC 1035 label, defined in RFC 1035. +func isDnsRFC1035LabelFormat(fl FieldLevel) bool { + val := fl.Field().String() + return dnsRegexRFC1035Label.MatchString(val) +} + +// isCreditCard is the validation function for validating if the current field's value is a valid credit card number +func isCreditCard(fl FieldLevel) bool { + val := fl.Field().String() + var creditCard bytes.Buffer + segments := strings.Split(val, " ") + for _, segment := range segments { + if len(segment) < 3 { + return false + } + creditCard.WriteString(segment) + } + + ccDigits := strings.Split(creditCard.String(), "") + size := len(ccDigits) + if size < 12 || size > 19 { + return false + } + + sum := 0 + for i, digit := range ccDigits { + value, err := strconv.Atoi(digit) + if err != nil { + return false + } + if size%2 == 0 && i%2 == 0 || size%2 == 1 && i%2 == 1 { + v := value * 2 + if v >= 10 { + sum += 1 + (v % 10) + } else { + sum += v + } + } else { + sum += value + } + } + return (sum % 10) == 0 +} diff --git a/index/server/vendor/github.com/go-playground/validator/v10/cache.go b/index/server/vendor/github.com/go-playground/validator/v10/cache.go index 0d18d6ec..7b84c91f 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/cache.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/cache.go @@ -114,12 +114,13 @@ func (v *Validate) extractStructCache(current reflect.Value, sName string) *cStr cs = &cStruct{name: sName, fields: make([]*cField, 0), fn: v.structLevelFuncs[typ]} numFields := current.NumField() + rules := v.rules[typ] var ctag *cTag var fld reflect.StructField var tag string var customName string - + for i := 0; i < numFields; i++ { fld = typ.Field(i) @@ -128,7 +129,11 @@ func (v *Validate) extractStructCache(current reflect.Value, sName string) *cStr continue } - tag = fld.Tag.Get(v.tagName) + if rtag, ok := rules[fld.Name]; ok { + tag = rtag + } else { + tag = fld.Tag.Get(v.tagName) + } if tag == skipValidationTag { continue diff 
--git a/index/server/vendor/github.com/go-playground/validator/v10/country_codes.go b/index/server/vendor/github.com/go-playground/validator/v10/country_codes.go index ef81eada..0d9eda03 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/country_codes.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/country_codes.go @@ -160,3 +160,973 @@ var iso3166_1_alpha_numeric = map[int]bool{ 704: true, 92: true, 850: true, 876: true, 732: true, 887: true, 894: true, 716: true, 248: true, } + +var iso3166_2 = map[string]bool{ + "AD-02" : true, "AD-03" : true, "AD-04" : true, "AD-05" : true, "AD-06" : true, + "AD-07" : true, "AD-08" : true, "AE-AJ" : true, "AE-AZ" : true, "AE-DU" : true, + "AE-FU" : true, "AE-RK" : true, "AE-SH" : true, "AE-UQ" : true, "AF-BAL" : true, + "AF-BAM" : true, "AF-BDG" : true, "AF-BDS" : true, "AF-BGL" : true, "AF-DAY" : true, + "AF-FRA" : true, "AF-FYB" : true, "AF-GHA" : true, "AF-GHO" : true, "AF-HEL" : true, + "AF-HER" : true, "AF-JOW" : true, "AF-KAB" : true, "AF-KAN" : true, "AF-KAP" : true, + "AF-KDZ" : true, "AF-KHO" : true, "AF-KNR" : true, "AF-LAG" : true, "AF-LOG" : true, + "AF-NAN" : true, "AF-NIM" : true, "AF-NUR" : true, "AF-PAN" : true, "AF-PAR" : true, + "AF-PIA" : true, "AF-PKA" : true, "AF-SAM" : true, "AF-SAR" : true, "AF-TAK" : true, + "AF-URU" : true, "AF-WAR" : true, "AF-ZAB" : true, "AG-03" : true, "AG-04" : true, + "AG-05" : true, "AG-06" : true, "AG-07" : true, "AG-08" : true, "AG-10" : true, + "AG-11" : true, "AL-01" : true, "AL-02" : true, "AL-03" : true, "AL-04" : true, + "AL-05" : true, "AL-06" : true, "AL-07" : true, "AL-08" : true, "AL-09" : true, + "AL-10" : true, "AL-11" : true, "AL-12" : true, "AL-BR" : true, "AL-BU" : true, + "AL-DI" : true, "AL-DL" : true, "AL-DR" : true, "AL-DV" : true, "AL-EL" : true, + "AL-ER" : true, "AL-FR" : true, "AL-GJ" : true, "AL-GR" : true, "AL-HA" : true, + "AL-KA" : true, "AL-KB" : true, "AL-KC" : true, "AL-KO" : true, "AL-KR" : true, + "AL-KU" : true, "AL-LB" : true, "AL-LE" : true, "AL-LU" : true, "AL-MK" : true, + "AL-MM" : true, "AL-MR" : true, "AL-MT" : true, "AL-PG" : true, "AL-PQ" : true, + "AL-PR" : true, "AL-PU" : true, "AL-SH" : true, "AL-SK" : true, "AL-SR" : true, + "AL-TE" : true, "AL-TP" : true, "AL-TR" : true, "AL-VL" : true, "AM-AG" : true, + "AM-AR" : true, "AM-AV" : true, "AM-ER" : true, "AM-GR" : true, "AM-KT" : true, + "AM-LO" : true, "AM-SH" : true, "AM-SU" : true, "AM-TV" : true, "AM-VD" : true, + "AO-BGO" : true, "AO-BGU" : true, "AO-BIE" : true, "AO-CAB" : true, "AO-CCU" : true, + "AO-CNN" : true, "AO-CNO" : true, "AO-CUS" : true, "AO-HUA" : true, "AO-HUI" : true, + "AO-LNO" : true, "AO-LSU" : true, "AO-LUA" : true, "AO-MAL" : true, "AO-MOX" : true, + "AO-NAM" : true, "AO-UIG" : true, "AO-ZAI" : true, "AR-A" : true, "AR-B" : true, + "AR-C" : true, "AR-D" : true, "AR-E" : true, "AR-G" : true, "AR-H" : true, + "AR-J" : true, "AR-K" : true, "AR-L" : true, "AR-M" : true, "AR-N" : true, + "AR-P" : true, "AR-Q" : true, "AR-R" : true, "AR-S" : true, "AR-T" : true, + "AR-U" : true, "AR-V" : true, "AR-W" : true, "AR-X" : true, "AR-Y" : true, + "AR-Z" : true, "AT-1" : true, "AT-2" : true, "AT-3" : true, "AT-4" : true, + "AT-5" : true, "AT-6" : true, "AT-7" : true, "AT-8" : true, "AT-9" : true, + "AU-ACT" : true, "AU-NSW" : true, "AU-NT" : true, "AU-QLD" : true, "AU-SA" : true, + "AU-TAS" : true, "AU-VIC" : true, "AU-WA" : true, "AZ-ABS" : true, "AZ-AGA" : true, + "AZ-AGC" : true, "AZ-AGM" : true, "AZ-AGS" : true, "AZ-AGU" : 
true, "AZ-AST" : true, + "AZ-BA" : true, "AZ-BAB" : true, "AZ-BAL" : true, "AZ-BAR" : true, "AZ-BEY" : true, + "AZ-BIL" : true, "AZ-CAB" : true, "AZ-CAL" : true, "AZ-CUL" : true, "AZ-DAS" : true, + "AZ-FUZ" : true, "AZ-GA" : true, "AZ-GAD" : true, "AZ-GOR" : true, "AZ-GOY" : true, + "AZ-GYG" : true, "AZ-HAC" : true, "AZ-IMI" : true, "AZ-ISM" : true, "AZ-KAL" : true, + "AZ-KAN" : true, "AZ-KUR" : true, "AZ-LA" : true, "AZ-LAC" : true, "AZ-LAN" : true, + "AZ-LER" : true, "AZ-MAS" : true, "AZ-MI" : true, "AZ-NA" : true, "AZ-NEF" : true, + "AZ-NV" : true, "AZ-NX" : true, "AZ-OGU" : true, "AZ-ORD" : true, "AZ-QAB" : true, + "AZ-QAX" : true, "AZ-QAZ" : true, "AZ-QBA" : true, "AZ-QBI" : true, "AZ-QOB" : true, + "AZ-QUS" : true, "AZ-SA" : true, "AZ-SAB" : true, "AZ-SAD" : true, "AZ-SAH" : true, + "AZ-SAK" : true, "AZ-SAL" : true, "AZ-SAR" : true, "AZ-SAT" : true, "AZ-SBN" : true, + "AZ-SIY" : true, "AZ-SKR" : true, "AZ-SM" : true, "AZ-SMI" : true, "AZ-SMX" : true, + "AZ-SR" : true, "AZ-SUS" : true, "AZ-TAR" : true, "AZ-TOV" : true, "AZ-UCA" : true, + "AZ-XA" : true, "AZ-XAC" : true, "AZ-XCI" : true, "AZ-XIZ" : true, "AZ-XVD" : true, + "AZ-YAR" : true, "AZ-YE" : true, "AZ-YEV" : true, "AZ-ZAN" : true, "AZ-ZAQ" : true, + "AZ-ZAR" : true, "BA-01" : true, "BA-02" : true, "BA-03" : true, "BA-04" : true, + "BA-05" : true, "BA-06" : true, "BA-07" : true, "BA-08" : true, "BA-09" : true, + "BA-10" : true, "BA-BIH" : true, "BA-BRC" : true, "BA-SRP" : true, "BB-01" : true, + "BB-02" : true, "BB-03" : true, "BB-04" : true, "BB-05" : true, "BB-06" : true, + "BB-07" : true, "BB-08" : true, "BB-09" : true, "BB-10" : true, "BB-11" : true, + "BD-01" : true, "BD-02" : true, "BD-03" : true, "BD-04" : true, "BD-05" : true, + "BD-06" : true, "BD-07" : true, "BD-08" : true, "BD-09" : true, "BD-10" : true, + "BD-11" : true, "BD-12" : true, "BD-13" : true, "BD-14" : true, "BD-15" : true, + "BD-16" : true, "BD-17" : true, "BD-18" : true, "BD-19" : true, "BD-20" : true, + "BD-21" : true, "BD-22" : true, "BD-23" : true, "BD-24" : true, "BD-25" : true, + "BD-26" : true, "BD-27" : true, "BD-28" : true, "BD-29" : true, "BD-30" : true, + "BD-31" : true, "BD-32" : true, "BD-33" : true, "BD-34" : true, "BD-35" : true, + "BD-36" : true, "BD-37" : true, "BD-38" : true, "BD-39" : true, "BD-40" : true, + "BD-41" : true, "BD-42" : true, "BD-43" : true, "BD-44" : true, "BD-45" : true, + "BD-46" : true, "BD-47" : true, "BD-48" : true, "BD-49" : true, "BD-50" : true, + "BD-51" : true, "BD-52" : true, "BD-53" : true, "BD-54" : true, "BD-55" : true, + "BD-56" : true, "BD-57" : true, "BD-58" : true, "BD-59" : true, "BD-60" : true, + "BD-61" : true, "BD-62" : true, "BD-63" : true, "BD-64" : true, "BD-A" : true, + "BD-B" : true, "BD-C" : true, "BD-D" : true, "BD-E" : true, "BD-F" : true, + "BD-G" : true, "BE-BRU" : true, "BE-VAN" : true, "BE-VBR" : true, "BE-VLG" : true, + "BE-VLI" : true, "BE-VOV" : true, "BE-VWV" : true, "BE-WAL" : true, "BE-WBR" : true, + "BE-WHT" : true, "BE-WLG" : true, "BE-WLX" : true, "BE-WNA" : true, "BF-01" : true, + "BF-02" : true, "BF-03" : true, "BF-04" : true, "BF-05" : true, "BF-06" : true, + "BF-07" : true, "BF-08" : true, "BF-09" : true, "BF-10" : true, "BF-11" : true, + "BF-12" : true, "BF-13" : true, "BF-BAL" : true, "BF-BAM" : true, "BF-BAN" : true, + "BF-BAZ" : true, "BF-BGR" : true, "BF-BLG" : true, "BF-BLK" : true, "BF-COM" : true, + "BF-GAN" : true, "BF-GNA" : true, "BF-GOU" : true, "BF-HOU" : true, "BF-IOB" : true, + "BF-KAD" : true, "BF-KEN" : true, "BF-KMD" : true, "BF-KMP" : true, "BF-KOP" : true, + 
"BF-KOS" : true, "BF-KOT" : true, "BF-KOW" : true, "BF-LER" : true, "BF-LOR" : true, + "BF-MOU" : true, "BF-NAM" : true, "BF-NAO" : true, "BF-NAY" : true, "BF-NOU" : true, + "BF-OUB" : true, "BF-OUD" : true, "BF-PAS" : true, "BF-PON" : true, "BF-SEN" : true, + "BF-SIS" : true, "BF-SMT" : true, "BF-SNG" : true, "BF-SOM" : true, "BF-SOR" : true, + "BF-TAP" : true, "BF-TUI" : true, "BF-YAG" : true, "BF-YAT" : true, "BF-ZIR" : true, + "BF-ZON" : true, "BF-ZOU" : true, "BG-01" : true, "BG-02" : true, "BG-03" : true, + "BG-04" : true, "BG-05" : true, "BG-06" : true, "BG-07" : true, "BG-08" : true, + "BG-09" : true, "BG-10" : true, "BG-11" : true, "BG-12" : true, "BG-13" : true, + "BG-14" : true, "BG-15" : true, "BG-16" : true, "BG-17" : true, "BG-18" : true, + "BG-19" : true, "BG-20" : true, "BG-21" : true, "BG-22" : true, "BG-23" : true, + "BG-24" : true, "BG-25" : true, "BG-26" : true, "BG-27" : true, "BG-28" : true, + "BH-13" : true, "BH-14" : true, "BH-15" : true, "BH-16" : true, "BH-17" : true, + "BI-BB" : true, "BI-BL" : true, "BI-BM" : true, "BI-BR" : true, "BI-CA" : true, + "BI-CI" : true, "BI-GI" : true, "BI-KI" : true, "BI-KR" : true, "BI-KY" : true, + "BI-MA" : true, "BI-MU" : true, "BI-MW" : true, "BI-NG" : true, "BI-RT" : true, + "BI-RY" : true, "BJ-AK" : true, "BJ-AL" : true, "BJ-AQ" : true, "BJ-BO" : true, + "BJ-CO" : true, "BJ-DO" : true, "BJ-KO" : true, "BJ-LI" : true, "BJ-MO" : true, + "BJ-OU" : true, "BJ-PL" : true, "BJ-ZO" : true, "BN-BE" : true, "BN-BM" : true, + "BN-TE" : true, "BN-TU" : true, "BO-B" : true, "BO-C" : true, "BO-H" : true, + "BO-L" : true, "BO-N" : true, "BO-O" : true, "BO-P" : true, "BO-S" : true, + "BO-T" : true, "BQ-BO" : true, "BQ-SA" : true, "BQ-SE" : true, "BR-AC" : true, + "BR-AL" : true, "BR-AM" : true, "BR-AP" : true, "BR-BA" : true, "BR-CE" : true, + "BR-DF" : true, "BR-ES" : true, "BR-FN" : true, "BR-GO" : true, "BR-MA" : true, + "BR-MG" : true, "BR-MS" : true, "BR-MT" : true, "BR-PA" : true, "BR-PB" : true, + "BR-PE" : true, "BR-PI" : true, "BR-PR" : true, "BR-RJ" : true, "BR-RN" : true, + "BR-RO" : true, "BR-RR" : true, "BR-RS" : true, "BR-SC" : true, "BR-SE" : true, + "BR-SP" : true, "BR-TO" : true, "BS-AK" : true, "BS-BI" : true, "BS-BP" : true, + "BS-BY" : true, "BS-CE" : true, "BS-CI" : true, "BS-CK" : true, "BS-CO" : true, + "BS-CS" : true, "BS-EG" : true, "BS-EX" : true, "BS-FP" : true, "BS-GC" : true, + "BS-HI" : true, "BS-HT" : true, "BS-IN" : true, "BS-LI" : true, "BS-MC" : true, + "BS-MG" : true, "BS-MI" : true, "BS-NE" : true, "BS-NO" : true, "BS-NS" : true, + "BS-RC" : true, "BS-RI" : true, "BS-SA" : true, "BS-SE" : true, "BS-SO" : true, + "BS-SS" : true, "BS-SW" : true, "BS-WG" : true, "BT-11" : true, "BT-12" : true, + "BT-13" : true, "BT-14" : true, "BT-15" : true, "BT-21" : true, "BT-22" : true, + "BT-23" : true, "BT-24" : true, "BT-31" : true, "BT-32" : true, "BT-33" : true, + "BT-34" : true, "BT-41" : true, "BT-42" : true, "BT-43" : true, "BT-44" : true, + "BT-45" : true, "BT-GA" : true, "BT-TY" : true, "BW-CE" : true, "BW-GH" : true, + "BW-KG" : true, "BW-KL" : true, "BW-KW" : true, "BW-NE" : true, "BW-NW" : true, + "BW-SE" : true, "BW-SO" : true, "BY-BR" : true, "BY-HM" : true, "BY-HO" : true, + "BY-HR" : true, "BY-MA" : true, "BY-MI" : true, "BY-VI" : true, "BZ-BZ" : true, + "BZ-CY" : true, "BZ-CZL" : true, "BZ-OW" : true, "BZ-SC" : true, "BZ-TOL" : true, + "CA-AB" : true, "CA-BC" : true, "CA-MB" : true, "CA-NB" : true, "CA-NL" : true, + "CA-NS" : true, "CA-NT" : true, "CA-NU" : true, "CA-ON" : true, "CA-PE" : true, + "CA-QC" 
: true, "CA-SK" : true, "CA-YT" : true, "CD-BC" : true, "CD-BN" : true, + "CD-EQ" : true, "CD-KA" : true, "CD-KE" : true, "CD-KN" : true, "CD-KW" : true, + "CD-MA" : true, "CD-NK" : true, "CD-OR" : true, "CD-SK" : true, "CF-AC" : true, + "CF-BB" : true, "CF-BGF" : true, "CF-BK" : true, "CF-HK" : true, "CF-HM" : true, + "CF-HS" : true, "CF-KB" : true, "CF-KG" : true, "CF-LB" : true, "CF-MB" : true, + "CF-MP" : true, "CF-NM" : true, "CF-OP" : true, "CF-SE" : true, "CF-UK" : true, + "CF-VK" : true, "CG-11" : true, "CG-12" : true, "CG-13" : true, "CG-14" : true, + "CG-15" : true, "CG-2" : true, "CG-5" : true, "CG-7" : true, "CG-8" : true, + "CG-9" : true, "CG-BZV" : true, "CH-AG" : true, "CH-AI" : true, "CH-AR" : true, + "CH-BE" : true, "CH-BL" : true, "CH-BS" : true, "CH-FR" : true, "CH-GE" : true, + "CH-GL" : true, "CH-GR" : true, "CH-JU" : true, "CH-LU" : true, "CH-NE" : true, + "CH-NW" : true, "CH-OW" : true, "CH-SG" : true, "CH-SH" : true, "CH-SO" : true, + "CH-SZ" : true, "CH-TG" : true, "CH-TI" : true, "CH-UR" : true, "CH-VD" : true, + "CH-VS" : true, "CH-ZG" : true, "CH-ZH" : true, "CI-01" : true, "CI-02" : true, + "CI-03" : true, "CI-04" : true, "CI-05" : true, "CI-06" : true, "CI-07" : true, + "CI-08" : true, "CI-09" : true, "CI-10" : true, "CI-11" : true, "CI-12" : true, + "CI-13" : true, "CI-14" : true, "CI-15" : true, "CI-16" : true, "CI-17" : true, + "CI-18" : true, "CI-19" : true, "CL-AI" : true, "CL-AN" : true, "CL-AP" : true, + "CL-AR" : true, "CL-AT" : true, "CL-BI" : true, "CL-CO" : true, "CL-LI" : true, + "CL-LL" : true, "CL-LR" : true, "CL-MA" : true, "CL-ML" : true, "CL-RM" : true, + "CL-TA" : true, "CL-VS" : true, "CM-AD" : true, "CM-CE" : true, "CM-EN" : true, + "CM-ES" : true, "CM-LT" : true, "CM-NO" : true, "CM-NW" : true, "CM-OU" : true, + "CM-SU" : true, "CM-SW" : true, "CN-11" : true, "CN-12" : true, "CN-13" : true, + "CN-14" : true, "CN-15" : true, "CN-21" : true, "CN-22" : true, "CN-23" : true, + "CN-31" : true, "CN-32" : true, "CN-33" : true, "CN-34" : true, "CN-35" : true, + "CN-36" : true, "CN-37" : true, "CN-41" : true, "CN-42" : true, "CN-43" : true, + "CN-44" : true, "CN-45" : true, "CN-46" : true, "CN-50" : true, "CN-51" : true, + "CN-52" : true, "CN-53" : true, "CN-54" : true, "CN-61" : true, "CN-62" : true, + "CN-63" : true, "CN-64" : true, "CN-65" : true, "CN-71" : true, "CN-91" : true, + "CN-92" : true, "CO-AMA" : true, "CO-ANT" : true, "CO-ARA" : true, "CO-ATL" : true, + "CO-BOL" : true, "CO-BOY" : true, "CO-CAL" : true, "CO-CAQ" : true, "CO-CAS" : true, + "CO-CAU" : true, "CO-CES" : true, "CO-CHO" : true, "CO-COR" : true, "CO-CUN" : true, + "CO-DC" : true, "CO-GUA" : true, "CO-GUV" : true, "CO-HUI" : true, "CO-LAG" : true, + "CO-MAG" : true, "CO-MET" : true, "CO-NAR" : true, "CO-NSA" : true, "CO-PUT" : true, + "CO-QUI" : true, "CO-RIS" : true, "CO-SAN" : true, "CO-SAP" : true, "CO-SUC" : true, + "CO-TOL" : true, "CO-VAC" : true, "CO-VAU" : true, "CO-VID" : true, "CR-A" : true, + "CR-C" : true, "CR-G" : true, "CR-H" : true, "CR-L" : true, "CR-P" : true, + "CR-SJ" : true, "CU-01" : true, "CU-02" : true, "CU-03" : true, "CU-04" : true, + "CU-05" : true, "CU-06" : true, "CU-07" : true, "CU-08" : true, "CU-09" : true, + "CU-10" : true, "CU-11" : true, "CU-12" : true, "CU-13" : true, "CU-14" : true, + "CU-99" : true, "CV-B" : true, "CV-BR" : true, "CV-BV" : true, "CV-CA" : true, + "CV-CF" : true, "CV-CR" : true, "CV-MA" : true, "CV-MO" : true, "CV-PA" : true, + "CV-PN" : true, "CV-PR" : true, "CV-RB" : true, "CV-RG" : true, "CV-RS" : true, + "CV-S" : true, 
"CV-SD" : true, "CV-SF" : true, "CV-SL" : true, "CV-SM" : true, + "CV-SO" : true, "CV-SS" : true, "CV-SV" : true, "CV-TA" : true, "CV-TS" : true, + "CY-01" : true, "CY-02" : true, "CY-03" : true, "CY-04" : true, "CY-05" : true, + "CY-06" : true, "CZ-10" : true, "CZ-101" : true, "CZ-102" : true, "CZ-103" : true, + "CZ-104" : true, "CZ-105" : true, "CZ-106" : true, "CZ-107" : true, "CZ-108" : true, + "CZ-109" : true, "CZ-110" : true, "CZ-111" : true, "CZ-112" : true, "CZ-113" : true, + "CZ-114" : true, "CZ-115" : true, "CZ-116" : true, "CZ-117" : true, "CZ-118" : true, + "CZ-119" : true, "CZ-120" : true, "CZ-121" : true, "CZ-122" : true, "CZ-20" : true, + "CZ-201" : true, "CZ-202" : true, "CZ-203" : true, "CZ-204" : true, "CZ-205" : true, + "CZ-206" : true, "CZ-207" : true, "CZ-208" : true, "CZ-209" : true, "CZ-20A" : true, + "CZ-20B" : true, "CZ-20C" : true, "CZ-31" : true, "CZ-311" : true, "CZ-312" : true, + "CZ-313" : true, "CZ-314" : true, "CZ-315" : true, "CZ-316" : true, "CZ-317" : true, + "CZ-32" : true, "CZ-321" : true, "CZ-322" : true, "CZ-323" : true, "CZ-324" : true, + "CZ-325" : true, "CZ-326" : true, "CZ-327" : true, "CZ-41" : true, "CZ-411" : true, + "CZ-412" : true, "CZ-413" : true, "CZ-42" : true, "CZ-421" : true, "CZ-422" : true, + "CZ-423" : true, "CZ-424" : true, "CZ-425" : true, "CZ-426" : true, "CZ-427" : true, + "CZ-51" : true, "CZ-511" : true, "CZ-512" : true, "CZ-513" : true, "CZ-514" : true, + "CZ-52" : true, "CZ-521" : true, "CZ-522" : true, "CZ-523" : true, "CZ-524" : true, + "CZ-525" : true, "CZ-53" : true, "CZ-531" : true, "CZ-532" : true, "CZ-533" : true, + "CZ-534" : true, "CZ-63" : true, "CZ-631" : true, "CZ-632" : true, "CZ-633" : true, + "CZ-634" : true, "CZ-635" : true, "CZ-64" : true, "CZ-641" : true, "CZ-642" : true, + "CZ-643" : true, "CZ-644" : true, "CZ-645" : true, "CZ-646" : true, "CZ-647" : true, + "CZ-71" : true, "CZ-711" : true, "CZ-712" : true, "CZ-713" : true, "CZ-714" : true, + "CZ-715" : true, "CZ-72" : true, "CZ-721" : true, "CZ-722" : true, "CZ-723" : true, + "CZ-724" : true, "CZ-80" : true, "CZ-801" : true, "CZ-802" : true, "CZ-803" : true, + "CZ-804" : true, "CZ-805" : true, "CZ-806" : true, "DE-BB" : true, "DE-BE" : true, + "DE-BW" : true, "DE-BY" : true, "DE-HB" : true, "DE-HE" : true, "DE-HH" : true, + "DE-MV" : true, "DE-NI" : true, "DE-NW" : true, "DE-RP" : true, "DE-SH" : true, + "DE-SL" : true, "DE-SN" : true, "DE-ST" : true, "DE-TH" : true, "DJ-AR" : true, + "DJ-AS" : true, "DJ-DI" : true, "DJ-DJ" : true, "DJ-OB" : true, "DJ-TA" : true, + "DK-81" : true, "DK-82" : true, "DK-83" : true, "DK-84" : true, "DK-85" : true, + "DM-01" : true, "DM-02" : true, "DM-03" : true, "DM-04" : true, "DM-05" : true, + "DM-06" : true, "DM-07" : true, "DM-08" : true, "DM-09" : true, "DM-10" : true, + "DO-01" : true, "DO-02" : true, "DO-03" : true, "DO-04" : true, "DO-05" : true, + "DO-06" : true, "DO-07" : true, "DO-08" : true, "DO-09" : true, "DO-10" : true, + "DO-11" : true, "DO-12" : true, "DO-13" : true, "DO-14" : true, "DO-15" : true, + "DO-16" : true, "DO-17" : true, "DO-18" : true, "DO-19" : true, "DO-20" : true, + "DO-21" : true, "DO-22" : true, "DO-23" : true, "DO-24" : true, "DO-25" : true, + "DO-26" : true, "DO-27" : true, "DO-28" : true, "DO-29" : true, "DO-30" : true, + "DZ-01" : true, "DZ-02" : true, "DZ-03" : true, "DZ-04" : true, "DZ-05" : true, + "DZ-06" : true, "DZ-07" : true, "DZ-08" : true, "DZ-09" : true, "DZ-10" : true, + "DZ-11" : true, "DZ-12" : true, "DZ-13" : true, "DZ-14" : true, "DZ-15" : true, + "DZ-16" : true, "DZ-17" : 
true, "DZ-18" : true, "DZ-19" : true, "DZ-20" : true, + "DZ-21" : true, "DZ-22" : true, "DZ-23" : true, "DZ-24" : true, "DZ-25" : true, + "DZ-26" : true, "DZ-27" : true, "DZ-28" : true, "DZ-29" : true, "DZ-30" : true, + "DZ-31" : true, "DZ-32" : true, "DZ-33" : true, "DZ-34" : true, "DZ-35" : true, + "DZ-36" : true, "DZ-37" : true, "DZ-38" : true, "DZ-39" : true, "DZ-40" : true, + "DZ-41" : true, "DZ-42" : true, "DZ-43" : true, "DZ-44" : true, "DZ-45" : true, + "DZ-46" : true, "DZ-47" : true, "DZ-48" : true, "EC-A" : true, "EC-B" : true, + "EC-C" : true, "EC-D" : true, "EC-E" : true, "EC-F" : true, "EC-G" : true, + "EC-H" : true, "EC-I" : true, "EC-L" : true, "EC-M" : true, "EC-N" : true, + "EC-O" : true, "EC-P" : true, "EC-R" : true, "EC-S" : true, "EC-SD" : true, + "EC-SE" : true, "EC-T" : true, "EC-U" : true, "EC-W" : true, "EC-X" : true, + "EC-Y" : true, "EC-Z" : true, "EE-37" : true, "EE-39" : true, "EE-44" : true, + "EE-49" : true, "EE-51" : true, "EE-57" : true, "EE-59" : true, "EE-65" : true, + "EE-67" : true, "EE-70" : true, "EE-74" : true, "EE-78" : true, "EE-82" : true, + "EE-84" : true, "EE-86" : true, "EG-ALX" : true, "EG-ASN" : true, "EG-AST" : true, + "EG-BA" : true, "EG-BH" : true, "EG-BNS" : true, "EG-C" : true, "EG-DK" : true, + "EG-DT" : true, "EG-FYM" : true, "EG-GH" : true, "EG-GZ" : true, "EG-HU" : true, + "EG-IS" : true, "EG-JS" : true, "EG-KB" : true, "EG-KFS" : true, "EG-KN" : true, + "EG-MN" : true, "EG-MNF" : true, "EG-MT" : true, "EG-PTS" : true, "EG-SHG" : true, + "EG-SHR" : true, "EG-SIN" : true, "EG-SU" : true, "EG-SUZ" : true, "EG-WAD" : true, + "ER-AN" : true, "ER-DK" : true, "ER-DU" : true, "ER-GB" : true, "ER-MA" : true, + "ER-SK" : true, "ES-A" : true, "ES-AB" : true, "ES-AL" : true, "ES-AN" : true, + "ES-AR" : true, "ES-AS" : true, "ES-AV" : true, "ES-B" : true, "ES-BA" : true, + "ES-BI" : true, "ES-BU" : true, "ES-C" : true, "ES-CA" : true, "ES-CB" : true, + "ES-CC" : true, "ES-CE" : true, "ES-CL" : true, "ES-CM" : true, "ES-CN" : true, + "ES-CO" : true, "ES-CR" : true, "ES-CS" : true, "ES-CT" : true, "ES-CU" : true, + "ES-EX" : true, "ES-GA" : true, "ES-GC" : true, "ES-GI" : true, "ES-GR" : true, + "ES-GU" : true, "ES-H" : true, "ES-HU" : true, "ES-IB" : true, "ES-J" : true, + "ES-L" : true, "ES-LE" : true, "ES-LO" : true, "ES-LU" : true, "ES-M" : true, + "ES-MA" : true, "ES-MC" : true, "ES-MD" : true, "ES-ML" : true, "ES-MU" : true, + "ES-NA" : true, "ES-NC" : true, "ES-O" : true, "ES-OR" : true, "ES-P" : true, + "ES-PM" : true, "ES-PO" : true, "ES-PV" : true, "ES-RI" : true, "ES-S" : true, + "ES-SA" : true, "ES-SE" : true, "ES-SG" : true, "ES-SO" : true, "ES-SS" : true, + "ES-T" : true, "ES-TE" : true, "ES-TF" : true, "ES-TO" : true, "ES-V" : true, + "ES-VA" : true, "ES-VC" : true, "ES-VI" : true, "ES-Z" : true, "ES-ZA" : true, + "ET-AA" : true, "ET-AF" : true, "ET-AM" : true, "ET-BE" : true, "ET-DD" : true, + "ET-GA" : true, "ET-HA" : true, "ET-OR" : true, "ET-SN" : true, "ET-SO" : true, + "ET-TI" : true, "FI-01" : true, "FI-02" : true, "FI-03" : true, "FI-04" : true, + "FI-05" : true, "FI-06" : true, "FI-07" : true, "FI-08" : true, "FI-09" : true, + "FI-10" : true, "FI-11" : true, "FI-12" : true, "FI-13" : true, "FI-14" : true, + "FI-15" : true, "FI-16" : true, "FI-17" : true, "FI-18" : true, "FI-19" : true, + "FJ-C" : true, "FJ-E" : true, "FJ-N" : true, "FJ-R" : true, "FJ-W" : true, + "FM-KSA" : true, "FM-PNI" : true, "FM-TRK" : true, "FM-YAP" : true, "FR-01" : true, + "FR-02" : true, "FR-03" : true, "FR-04" : true, "FR-05" : true, "FR-06" : 
true, + "FR-07" : true, "FR-08" : true, "FR-09" : true, "FR-10" : true, "FR-11" : true, + "FR-12" : true, "FR-13" : true, "FR-14" : true, "FR-15" : true, "FR-16" : true, + "FR-17" : true, "FR-18" : true, "FR-19" : true, "FR-21" : true, "FR-22" : true, + "FR-23" : true, "FR-24" : true, "FR-25" : true, "FR-26" : true, "FR-27" : true, + "FR-28" : true, "FR-29" : true, "FR-2A" : true, "FR-2B" : true, "FR-30" : true, + "FR-31" : true, "FR-32" : true, "FR-33" : true, "FR-34" : true, "FR-35" : true, + "FR-36" : true, "FR-37" : true, "FR-38" : true, "FR-39" : true, "FR-40" : true, + "FR-41" : true, "FR-42" : true, "FR-43" : true, "FR-44" : true, "FR-45" : true, + "FR-46" : true, "FR-47" : true, "FR-48" : true, "FR-49" : true, "FR-50" : true, + "FR-51" : true, "FR-52" : true, "FR-53" : true, "FR-54" : true, "FR-55" : true, + "FR-56" : true, "FR-57" : true, "FR-58" : true, "FR-59" : true, "FR-60" : true, + "FR-61" : true, "FR-62" : true, "FR-63" : true, "FR-64" : true, "FR-65" : true, + "FR-66" : true, "FR-67" : true, "FR-68" : true, "FR-69" : true, "FR-70" : true, + "FR-71" : true, "FR-72" : true, "FR-73" : true, "FR-74" : true, "FR-75" : true, + "FR-76" : true, "FR-77" : true, "FR-78" : true, "FR-79" : true, "FR-80" : true, + "FR-81" : true, "FR-82" : true, "FR-83" : true, "FR-84" : true, "FR-85" : true, + "FR-86" : true, "FR-87" : true, "FR-88" : true, "FR-89" : true, "FR-90" : true, + "FR-91" : true, "FR-92" : true, "FR-93" : true, "FR-94" : true, "FR-95" : true, + "FR-ARA" : true, "FR-BFC" : true, "FR-BL" : true, "FR-BRE" : true, "FR-COR" : true, + "FR-CP" : true, "FR-CVL" : true, "FR-GES" : true, "FR-GF" : true, "FR-GP" : true, + "FR-GUA" : true, "FR-HDF" : true, "FR-IDF" : true, "FR-LRE" : true, "FR-MAY" : true, + "FR-MF" : true, "FR-MQ" : true, "FR-NAQ" : true, "FR-NC" : true, "FR-NOR" : true, + "FR-OCC" : true, "FR-PAC" : true, "FR-PDL" : true, "FR-PF" : true, "FR-PM" : true, + "FR-RE" : true, "FR-TF" : true, "FR-WF" : true, "FR-YT" : true, "GA-1" : true, + "GA-2" : true, "GA-3" : true, "GA-4" : true, "GA-5" : true, "GA-6" : true, + "GA-7" : true, "GA-8" : true, "GA-9" : true, "GB-ABC" : true, "GB-ABD" : true, + "GB-ABE" : true, "GB-AGB" : true, "GB-AGY" : true, "GB-AND" : true, "GB-ANN" : true, + "GB-ANS" : true, "GB-BAS" : true, "GB-BBD" : true, "GB-BDF" : true, "GB-BDG" : true, + "GB-BEN" : true, "GB-BEX" : true, "GB-BFS" : true, "GB-BGE" : true, "GB-BGW" : true, + "GB-BIR" : true, "GB-BKM" : true, "GB-BMH" : true, "GB-BNE" : true, "GB-BNH" : true, + "GB-BNS" : true, "GB-BOL" : true, "GB-BPL" : true, "GB-BRC" : true, "GB-BRD" : true, + "GB-BRY" : true, "GB-BST" : true, "GB-BUR" : true, "GB-CAM" : true, "GB-CAY" : true, + "GB-CBF" : true, "GB-CCG" : true, "GB-CGN" : true, "GB-CHE" : true, "GB-CHW" : true, + "GB-CLD" : true, "GB-CLK" : true, "GB-CMA" : true, "GB-CMD" : true, "GB-CMN" : true, + "GB-CON" : true, "GB-COV" : true, "GB-CRF" : true, "GB-CRY" : true, "GB-CWY" : true, + "GB-DAL" : true, "GB-DBY" : true, "GB-DEN" : true, "GB-DER" : true, "GB-DEV" : true, + "GB-DGY" : true, "GB-DNC" : true, "GB-DND" : true, "GB-DOR" : true, "GB-DRS" : true, + "GB-DUD" : true, "GB-DUR" : true, "GB-EAL" : true, "GB-EAW" : true, "GB-EAY" : true, + "GB-EDH" : true, "GB-EDU" : true, "GB-ELN" : true, "GB-ELS" : true, "GB-ENF" : true, + "GB-ENG" : true, "GB-ERW" : true, "GB-ERY" : true, "GB-ESS" : true, "GB-ESX" : true, + "GB-FAL" : true, "GB-FIF" : true, "GB-FLN" : true, "GB-FMO" : true, "GB-GAT" : true, + "GB-GBN" : true, "GB-GLG" : true, "GB-GLS" : true, "GB-GRE" : true, "GB-GWN" : true, + "GB-HAL" : 
true, "GB-HAM" : true, "GB-HAV" : true, "GB-HCK" : true, "GB-HEF" : true, + "GB-HIL" : true, "GB-HLD" : true, "GB-HMF" : true, "GB-HNS" : true, "GB-HPL" : true, + "GB-HRT" : true, "GB-HRW" : true, "GB-HRY" : true, "GB-IOS" : true, "GB-IOW" : true, + "GB-ISL" : true, "GB-IVC" : true, "GB-KEC" : true, "GB-KEN" : true, "GB-KHL" : true, + "GB-KIR" : true, "GB-KTT" : true, "GB-KWL" : true, "GB-LAN" : true, "GB-LBC" : true, + "GB-LBH" : true, "GB-LCE" : true, "GB-LDS" : true, "GB-LEC" : true, "GB-LEW" : true, + "GB-LIN" : true, "GB-LIV" : true, "GB-LND" : true, "GB-LUT" : true, "GB-MAN" : true, + "GB-MDB" : true, "GB-MDW" : true, "GB-MEA" : true, "GB-MIK" : true, "GD-01" : true, + "GB-MLN" : true, "GB-MON" : true, "GB-MRT" : true, "GB-MRY" : true, "GB-MTY" : true, + "GB-MUL" : true, "GB-NAY" : true, "GB-NBL" : true, "GB-NEL" : true, "GB-NET" : true, + "GB-NFK" : true, "GB-NGM" : true, "GB-NIR" : true, "GB-NLK" : true, "GB-NLN" : true, + "GB-NMD" : true, "GB-NSM" : true, "GB-NTH" : true, "GB-NTL" : true, "GB-NTT" : true, + "GB-NTY" : true, "GB-NWM" : true, "GB-NWP" : true, "GB-NYK" : true, "GB-OLD" : true, + "GB-ORK" : true, "GB-OXF" : true, "GB-PEM" : true, "GB-PKN" : true, "GB-PLY" : true, + "GB-POL" : true, "GB-POR" : true, "GB-POW" : true, "GB-PTE" : true, "GB-RCC" : true, + "GB-RCH" : true, "GB-RCT" : true, "GB-RDB" : true, "GB-RDG" : true, "GB-RFW" : true, + "GB-RIC" : true, "GB-ROT" : true, "GB-RUT" : true, "GB-SAW" : true, "GB-SAY" : true, + "GB-SCB" : true, "GB-SCT" : true, "GB-SFK" : true, "GB-SFT" : true, "GB-SGC" : true, + "GB-SHF" : true, "GB-SHN" : true, "GB-SHR" : true, "GB-SKP" : true, "GB-SLF" : true, + "GB-SLG" : true, "GB-SLK" : true, "GB-SND" : true, "GB-SOL" : true, "GB-SOM" : true, + "GB-SOS" : true, "GB-SRY" : true, "GB-STE" : true, "GB-STG" : true, "GB-STH" : true, + "GB-STN" : true, "GB-STS" : true, "GB-STT" : true, "GB-STY" : true, "GB-SWA" : true, + "GB-SWD" : true, "GB-SWK" : true, "GB-TAM" : true, "GB-TFW" : true, "GB-THR" : true, + "GB-TOB" : true, "GB-TOF" : true, "GB-TRF" : true, "GB-TWH" : true, "GB-UKM" : true, + "GB-VGL" : true, "GB-WAR" : true, "GB-WBK" : true, "GB-WDU" : true, "GB-WFT" : true, + "GB-WGN" : true, "GB-WIL" : true, "GB-WKF" : true, "GB-WLL" : true, "GB-WLN" : true, + "GB-WLS" : true, "GB-WLV" : true, "GB-WND" : true, "GB-WNM" : true, "GB-WOK" : true, + "GB-WOR" : true, "GB-WRL" : true, "GB-WRT" : true, "GB-WRX" : true, "GB-WSM" : true, + "GB-WSX" : true, "GB-YOR" : true, "GB-ZET" : true, "GD-02" : true, "GD-03" : true, + "GD-04" : true, "GD-05" : true, "GD-06" : true, "GD-10" : true, "GE-AB" : true, + "GE-AJ" : true, "GE-GU" : true, "GE-IM" : true, "GE-KA" : true, "GE-KK" : true, + "GE-MM" : true, "GE-RL" : true, "GE-SJ" : true, "GE-SK" : true, "GE-SZ" : true, + "GE-TB" : true, "GH-AA" : true, "GH-AH" : true, "GH-BA" : true, "GH-CP" : true, + "GH-EP" : true, "GH-NP" : true, "GH-TV" : true, "GH-UE" : true, "GH-UW" : true, + "GH-WP" : true, "GL-KU" : true, "GL-QA" : true, "GL-QE" : true, "GL-SM" : true, + "GM-B" : true, "GM-L" : true, "GM-M" : true, "GM-N" : true, "GM-U" : true, + "GM-W" : true, "GN-B" : true, "GN-BE" : true, "GN-BF" : true, "GN-BK" : true, + "GN-C" : true, "GN-CO" : true, "GN-D" : true, "GN-DB" : true, "GN-DI" : true, + "GN-DL" : true, "GN-DU" : true, "GN-F" : true, "GN-FA" : true, "GN-FO" : true, + "GN-FR" : true, "GN-GA" : true, "GN-GU" : true, "GN-K" : true, "GN-KA" : true, + "GN-KB" : true, "GN-KD" : true, "GN-KE" : true, "GN-KN" : true, "GN-KO" : true, + "GN-KS" : true, "GN-L" : true, "GN-LA" : true, "GN-LE" : true, "GN-LO" : 
true, + "GN-M" : true, "GN-MC" : true, "GN-MD" : true, "GN-ML" : true, "GN-MM" : true, + "GN-N" : true, "GN-NZ" : true, "GN-PI" : true, "GN-SI" : true, "GN-TE" : true, + "GN-TO" : true, "GN-YO" : true, "GQ-AN" : true, "GQ-BN" : true, "GQ-BS" : true, + "GQ-C" : true, "GQ-CS" : true, "GQ-I" : true, "GQ-KN" : true, "GQ-LI" : true, + "GQ-WN" : true, "GR-01" : true, "GR-03" : true, "GR-04" : true, "GR-05" : true, + "GR-06" : true, "GR-07" : true, "GR-11" : true, "GR-12" : true, "GR-13" : true, + "GR-14" : true, "GR-15" : true, "GR-16" : true, "GR-17" : true, "GR-21" : true, + "GR-22" : true, "GR-23" : true, "GR-24" : true, "GR-31" : true, "GR-32" : true, + "GR-33" : true, "GR-34" : true, "GR-41" : true, "GR-42" : true, "GR-43" : true, + "GR-44" : true, "GR-51" : true, "GR-52" : true, "GR-53" : true, "GR-54" : true, + "GR-55" : true, "GR-56" : true, "GR-57" : true, "GR-58" : true, "GR-59" : true, + "GR-61" : true, "GR-62" : true, "GR-63" : true, "GR-64" : true, "GR-69" : true, + "GR-71" : true, "GR-72" : true, "GR-73" : true, "GR-81" : true, "GR-82" : true, + "GR-83" : true, "GR-84" : true, "GR-85" : true, "GR-91" : true, "GR-92" : true, + "GR-93" : true, "GR-94" : true, "GR-A" : true, "GR-A1" : true, "GR-B" : true, + "GR-C" : true, "GR-D" : true, "GR-E" : true, "GR-F" : true, "GR-G" : true, + "GR-H" : true, "GR-I" : true, "GR-J" : true, "GR-K" : true, "GR-L" : true, + "GR-M" : true, "GT-AV" : true, "GT-BV" : true, "GT-CM" : true, "GT-CQ" : true, + "GT-ES" : true, "GT-GU" : true, "GT-HU" : true, "GT-IZ" : true, "GT-JA" : true, + "GT-JU" : true, "GT-PE" : true, "GT-PR" : true, "GT-QC" : true, "GT-QZ" : true, + "GT-RE" : true, "GT-SA" : true, "GT-SM" : true, "GT-SO" : true, "GT-SR" : true, + "GT-SU" : true, "GT-TO" : true, "GT-ZA" : true, "GW-BA" : true, "GW-BL" : true, + "GW-BM" : true, "GW-BS" : true, "GW-CA" : true, "GW-GA" : true, "GW-L" : true, + "GW-N" : true, "GW-OI" : true, "GW-QU" : true, "GW-S" : true, "GW-TO" : true, + "GY-BA" : true, "GY-CU" : true, "GY-DE" : true, "GY-EB" : true, "GY-ES" : true, + "GY-MA" : true, "GY-PM" : true, "GY-PT" : true, "GY-UD" : true, "GY-UT" : true, + "HN-AT" : true, "HN-CH" : true, "HN-CL" : true, "HN-CM" : true, "HN-CP" : true, + "HN-CR" : true, "HN-EP" : true, "HN-FM" : true, "HN-GD" : true, "HN-IB" : true, + "HN-IN" : true, "HN-LE" : true, "HN-LP" : true, "HN-OC" : true, "HN-OL" : true, + "HN-SB" : true, "HN-VA" : true, "HN-YO" : true, "HR-01" : true, "HR-02" : true, + "HR-03" : true, "HR-04" : true, "HR-05" : true, "HR-06" : true, "HR-07" : true, + "HR-08" : true, "HR-09" : true, "HR-10" : true, "HR-11" : true, "HR-12" : true, + "HR-13" : true, "HR-14" : true, "HR-15" : true, "HR-16" : true, "HR-17" : true, + "HR-18" : true, "HR-19" : true, "HR-20" : true, "HR-21" : true, "HT-AR" : true, + "HT-CE" : true, "HT-GA" : true, "HT-ND" : true, "HT-NE" : true, "HT-NO" : true, + "HT-OU" : true, "HT-SD" : true, "HT-SE" : true, "HU-BA" : true, "HU-BC" : true, + "HU-BE" : true, "HU-BK" : true, "HU-BU" : true, "HU-BZ" : true, "HU-CS" : true, + "HU-DE" : true, "HU-DU" : true, "HU-EG" : true, "HU-ER" : true, "HU-FE" : true, + "HU-GS" : true, "HU-GY" : true, "HU-HB" : true, "HU-HE" : true, "HU-HV" : true, + "HU-JN" : true, "HU-KE" : true, "HU-KM" : true, "HU-KV" : true, "HU-MI" : true, + "HU-NK" : true, "HU-NO" : true, "HU-NY" : true, "HU-PE" : true, "HU-PS" : true, + "HU-SD" : true, "HU-SF" : true, "HU-SH" : true, "HU-SK" : true, "HU-SN" : true, + "HU-SO" : true, "HU-SS" : true, "HU-ST" : true, "HU-SZ" : true, "HU-TB" : true, + "HU-TO" : true, "HU-VA" : true, "HU-VE" 
: true, "HU-VM" : true, "HU-ZA" : true, + "HU-ZE" : true, "ID-AC" : true, "ID-BA" : true, "ID-BB" : true, "ID-BE" : true, + "ID-BT" : true, "ID-GO" : true, "ID-IJ" : true, "ID-JA" : true, "ID-JB" : true, + "ID-JI" : true, "ID-JK" : true, "ID-JT" : true, "ID-JW" : true, "ID-KA" : true, + "ID-KB" : true, "ID-KI" : true, "ID-KR" : true, "ID-KS" : true, "ID-KT" : true, + "ID-LA" : true, "ID-MA" : true, "ID-ML" : true, "ID-MU" : true, "ID-NB" : true, + "ID-NT" : true, "ID-NU" : true, "ID-PA" : true, "ID-PB" : true, "ID-RI" : true, + "ID-SA" : true, "ID-SB" : true, "ID-SG" : true, "ID-SL" : true, "ID-SM" : true, + "ID-SN" : true, "ID-SR" : true, "ID-SS" : true, "ID-ST" : true, "ID-SU" : true, + "ID-YO" : true, "IE-C" : true, "IE-CE" : true, "IE-CN" : true, "IE-CO" : true, + "IE-CW" : true, "IE-D" : true, "IE-DL" : true, "IE-G" : true, "IE-KE" : true, + "IE-KK" : true, "IE-KY" : true, "IE-L" : true, "IE-LD" : true, "IE-LH" : true, + "IE-LK" : true, "IE-LM" : true, "IE-LS" : true, "IE-M" : true, "IE-MH" : true, + "IE-MN" : true, "IE-MO" : true, "IE-OY" : true, "IE-RN" : true, "IE-SO" : true, + "IE-TA" : true, "IE-U" : true, "IE-WD" : true, "IE-WH" : true, "IE-WW" : true, + "IE-WX" : true, "IL-D" : true, "IL-HA" : true, "IL-JM" : true, "IL-M" : true, + "IL-TA" : true, "IL-Z" : true, "IN-AN" : true, "IN-AP" : true, "IN-AR" : true, + "IN-AS" : true, "IN-BR" : true, "IN-CH" : true, "IN-CT" : true, "IN-DD" : true, + "IN-DL" : true, "IN-DN" : true, "IN-GA" : true, "IN-GJ" : true, "IN-HP" : true, + "IN-HR" : true, "IN-JH" : true, "IN-JK" : true, "IN-KA" : true, "IN-KL" : true, + "IN-LD" : true, "IN-MH" : true, "IN-ML" : true, "IN-MN" : true, "IN-MP" : true, + "IN-MZ" : true, "IN-NL" : true, "IN-OR" : true, "IN-PB" : true, "IN-PY" : true, + "IN-RJ" : true, "IN-SK" : true, "IN-TN" : true, "IN-TR" : true, "IN-UP" : true, + "IN-UT" : true, "IN-WB" : true, "IQ-AN" : true, "IQ-AR" : true, "IQ-BA" : true, + "IQ-BB" : true, "IQ-BG" : true, "IQ-DA" : true, "IQ-DI" : true, "IQ-DQ" : true, + "IQ-KA" : true, "IQ-MA" : true, "IQ-MU" : true, "IQ-NA" : true, "IQ-NI" : true, + "IQ-QA" : true, "IQ-SD" : true, "IQ-SW" : true, "IQ-TS" : true, "IQ-WA" : true, + "IR-01" : true, "IR-02" : true, "IR-03" : true, "IR-04" : true, "IR-05" : true, + "IR-06" : true, "IR-07" : true, "IR-08" : true, "IR-10" : true, "IR-11" : true, + "IR-12" : true, "IR-13" : true, "IR-14" : true, "IR-15" : true, "IR-16" : true, + "IR-17" : true, "IR-18" : true, "IR-19" : true, "IR-20" : true, "IR-21" : true, + "IR-22" : true, "IR-23" : true, "IR-24" : true, "IR-25" : true, "IR-26" : true, + "IR-27" : true, "IR-28" : true, "IR-29" : true, "IR-30" : true, "IR-31" : true, + "IS-0" : true, "IS-1" : true, "IS-2" : true, "IS-3" : true, "IS-4" : true, + "IS-5" : true, "IS-6" : true, "IS-7" : true, "IS-8" : true, "IT-21" : true, + "IT-23" : true, "IT-25" : true, "IT-32" : true, "IT-34" : true, "IT-36" : true, + "IT-42" : true, "IT-45" : true, "IT-52" : true, "IT-55" : true, "IT-57" : true, + "IT-62" : true, "IT-65" : true, "IT-67" : true, "IT-72" : true, "IT-75" : true, + "IT-77" : true, "IT-78" : true, "IT-82" : true, "IT-88" : true, "IT-AG" : true, + "IT-AL" : true, "IT-AN" : true, "IT-AO" : true, "IT-AP" : true, "IT-AQ" : true, + "IT-AR" : true, "IT-AT" : true, "IT-AV" : true, "IT-BA" : true, "IT-BG" : true, + "IT-BI" : true, "IT-BL" : true, "IT-BN" : true, "IT-BO" : true, "IT-BR" : true, + "IT-BS" : true, "IT-BT" : true, "IT-BZ" : true, "IT-CA" : true, "IT-CB" : true, + "IT-CE" : true, "IT-CH" : true, "IT-CI" : true, "IT-CL" : true, "IT-CN" : true, + 
"IT-CO" : true, "IT-CR" : true, "IT-CS" : true, "IT-CT" : true, "IT-CZ" : true, + "IT-EN" : true, "IT-FC" : true, "IT-FE" : true, "IT-FG" : true, "IT-FI" : true, + "IT-FM" : true, "IT-FR" : true, "IT-GE" : true, "IT-GO" : true, "IT-GR" : true, + "IT-IM" : true, "IT-IS" : true, "IT-KR" : true, "IT-LC" : true, "IT-LE" : true, + "IT-LI" : true, "IT-LO" : true, "IT-LT" : true, "IT-LU" : true, "IT-MB" : true, + "IT-MC" : true, "IT-ME" : true, "IT-MI" : true, "IT-MN" : true, "IT-MO" : true, + "IT-MS" : true, "IT-MT" : true, "IT-NA" : true, "IT-NO" : true, "IT-NU" : true, + "IT-OG" : true, "IT-OR" : true, "IT-OT" : true, "IT-PA" : true, "IT-PC" : true, + "IT-PD" : true, "IT-PE" : true, "IT-PG" : true, "IT-PI" : true, "IT-PN" : true, + "IT-PO" : true, "IT-PR" : true, "IT-PT" : true, "IT-PU" : true, "IT-PV" : true, + "IT-PZ" : true, "IT-RA" : true, "IT-RC" : true, "IT-RE" : true, "IT-RG" : true, + "IT-RI" : true, "IT-RM" : true, "IT-RN" : true, "IT-RO" : true, "IT-SA" : true, + "IT-SI" : true, "IT-SO" : true, "IT-SP" : true, "IT-SR" : true, "IT-SS" : true, + "IT-SV" : true, "IT-TA" : true, "IT-TE" : true, "IT-TN" : true, "IT-TO" : true, + "IT-TP" : true, "IT-TR" : true, "IT-TS" : true, "IT-TV" : true, "IT-UD" : true, + "IT-VA" : true, "IT-VB" : true, "IT-VC" : true, "IT-VE" : true, "IT-VI" : true, + "IT-VR" : true, "IT-VS" : true, "IT-VT" : true, "IT-VV" : true, "JM-01" : true, + "JM-02" : true, "JM-03" : true, "JM-04" : true, "JM-05" : true, "JM-06" : true, + "JM-07" : true, "JM-08" : true, "JM-09" : true, "JM-10" : true, "JM-11" : true, + "JM-12" : true, "JM-13" : true, "JM-14" : true, "JO-AJ" : true, "JO-AM" : true, + "JO-AQ" : true, "JO-AT" : true, "JO-AZ" : true, "JO-BA" : true, "JO-IR" : true, + "JO-JA" : true, "JO-KA" : true, "JO-MA" : true, "JO-MD" : true, "JO-MN" : true, + "JP-01" : true, "JP-02" : true, "JP-03" : true, "JP-04" : true, "JP-05" : true, + "JP-06" : true, "JP-07" : true, "JP-08" : true, "JP-09" : true, "JP-10" : true, + "JP-11" : true, "JP-12" : true, "JP-13" : true, "JP-14" : true, "JP-15" : true, + "JP-16" : true, "JP-17" : true, "JP-18" : true, "JP-19" : true, "JP-20" : true, + "JP-21" : true, "JP-22" : true, "JP-23" : true, "JP-24" : true, "JP-25" : true, + "JP-26" : true, "JP-27" : true, "JP-28" : true, "JP-29" : true, "JP-30" : true, + "JP-31" : true, "JP-32" : true, "JP-33" : true, "JP-34" : true, "JP-35" : true, + "JP-36" : true, "JP-37" : true, "JP-38" : true, "JP-39" : true, "JP-40" : true, + "JP-41" : true, "JP-42" : true, "JP-43" : true, "JP-44" : true, "JP-45" : true, + "JP-46" : true, "JP-47" : true, "KE-110" : true, "KE-200" : true, "KE-300" : true, + "KE-400" : true, "KE-500" : true, "KE-700" : true, "KE-800" : true, "KG-B" : true, + "KG-C" : true, "KG-GB" : true, "KG-J" : true, "KG-N" : true, "KG-O" : true, + "KG-T" : true, "KG-Y" : true, "KH-1" : true, "KH-10" : true, "KH-11" : true, + "KH-12" : true, "KH-13" : true, "KH-14" : true, "KH-15" : true, "KH-16" : true, + "KH-17" : true, "KH-18" : true, "KH-19" : true, "KH-2" : true, "KH-20" : true, + "KH-21" : true, "KH-22" : true, "KH-23" : true, "KH-24" : true, "KH-3" : true, + "KH-4" : true, "KH-5" : true, "KH-6" : true, "KH-7" : true, "KH-8" : true, + "KH-9" : true, "KI-G" : true, "KI-L" : true, "KI-P" : true, "KM-A" : true, + "KM-G" : true, "KM-M" : true, "KN-01" : true, "KN-02" : true, "KN-03" : true, + "KN-04" : true, "KN-05" : true, "KN-06" : true, "KN-07" : true, "KN-08" : true, + "KN-09" : true, "KN-10" : true, "KN-11" : true, "KN-12" : true, "KN-13" : true, + "KN-15" : true, "KN-K" : true, "KN-N" : 
true, "KP-01" : true, "KP-02" : true, + "KP-03" : true, "KP-04" : true, "KP-05" : true, "KP-06" : true, "KP-07" : true, + "KP-08" : true, "KP-09" : true, "KP-10" : true, "KP-13" : true, "KR-11" : true, + "KR-26" : true, "KR-27" : true, "KR-28" : true, "KR-29" : true, "KR-30" : true, + "KR-31" : true, "KR-41" : true, "KR-42" : true, "KR-43" : true, "KR-44" : true, + "KR-45" : true, "KR-46" : true, "KR-47" : true, "KR-48" : true, "KR-49" : true, + "KW-AH" : true, "KW-FA" : true, "KW-HA" : true, "KW-JA" : true, "KW-KU" : true, + "KW-MU" : true, "KZ-AKM" : true, "KZ-AKT" : true, "KZ-ALA" : true, "KZ-ALM" : true, + "KZ-AST" : true, "KZ-ATY" : true, "KZ-KAR" : true, "KZ-KUS" : true, "KZ-KZY" : true, + "KZ-MAN" : true, "KZ-PAV" : true, "KZ-SEV" : true, "KZ-VOS" : true, "KZ-YUZ" : true, + "KZ-ZAP" : true, "KZ-ZHA" : true, "LA-AT" : true, "LA-BK" : true, "LA-BL" : true, + "LA-CH" : true, "LA-HO" : true, "LA-KH" : true, "LA-LM" : true, "LA-LP" : true, + "LA-OU" : true, "LA-PH" : true, "LA-SL" : true, "LA-SV" : true, "LA-VI" : true, + "LA-VT" : true, "LA-XA" : true, "LA-XE" : true, "LA-XI" : true, "LA-XS" : true, + "LB-AK" : true, "LB-AS" : true, "LB-BA" : true, "LB-BH" : true, "LB-BI" : true, + "LB-JA" : true, "LB-JL" : true, "LB-NA" : true, "LI-01" : true, "LI-02" : true, + "LI-03" : true, "LI-04" : true, "LI-05" : true, "LI-06" : true, "LI-07" : true, + "LI-08" : true, "LI-09" : true, "LI-10" : true, "LI-11" : true, "LK-1" : true, + "LK-11" : true, "LK-12" : true, "LK-13" : true, "LK-2" : true, "LK-21" : true, + "LK-22" : true, "LK-23" : true, "LK-3" : true, "LK-31" : true, "LK-32" : true, + "LK-33" : true, "LK-4" : true, "LK-41" : true, "LK-42" : true, "LK-43" : true, + "LK-44" : true, "LK-45" : true, "LK-5" : true, "LK-51" : true, "LK-52" : true, + "LK-53" : true, "LK-6" : true, "LK-61" : true, "LK-62" : true, "LK-7" : true, + "LK-71" : true, "LK-72" : true, "LK-8" : true, "LK-81" : true, "LK-82" : true, + "LK-9" : true, "LK-91" : true, "LK-92" : true, "LR-BG" : true, "LR-BM" : true, + "LR-CM" : true, "LR-GB" : true, "LR-GG" : true, "LR-GK" : true, "LR-LO" : true, + "LR-MG" : true, "LR-MO" : true, "LR-MY" : true, "LR-NI" : true, "LR-RI" : true, + "LR-SI" : true, "LS-A" : true, "LS-B" : true, "LS-C" : true, "LS-D" : true, + "LS-E" : true, "LS-F" : true, "LS-G" : true, "LS-H" : true, "LS-J" : true, + "LS-K" : true, "LT-AL" : true, "LT-KL" : true, "LT-KU" : true, "LT-MR" : true, + "LT-PN" : true, "LT-SA" : true, "LT-TA" : true, "LT-TE" : true, "LT-UT" : true, + "LT-VL" : true, "LU-D" : true, "LU-G" : true, "LU-L" : true, "LV-001" : true, + "LV-002" : true, "LV-003" : true, "LV-004" : true, "LV-005" : true, "LV-006" : true, + "LV-007" : true, "LV-008" : true, "LV-009" : true, "LV-010" : true, "LV-011" : true, + "LV-012" : true, "LV-013" : true, "LV-014" : true, "LV-015" : true, "LV-016" : true, + "LV-017" : true, "LV-018" : true, "LV-019" : true, "LV-020" : true, "LV-021" : true, + "LV-022" : true, "LV-023" : true, "LV-024" : true, "LV-025" : true, "LV-026" : true, + "LV-027" : true, "LV-028" : true, "LV-029" : true, "LV-030" : true, "LV-031" : true, + "LV-032" : true, "LV-033" : true, "LV-034" : true, "LV-035" : true, "LV-036" : true, + "LV-037" : true, "LV-038" : true, "LV-039" : true, "LV-040" : true, "LV-041" : true, + "LV-042" : true, "LV-043" : true, "LV-044" : true, "LV-045" : true, "LV-046" : true, + "LV-047" : true, "LV-048" : true, "LV-049" : true, "LV-050" : true, "LV-051" : true, + "LV-052" : true, "LV-053" : true, "LV-054" : true, "LV-055" : true, "LV-056" : true, + "LV-057" : true, 
"LV-058" : true, "LV-059" : true, "LV-060" : true, "LV-061" : true, + "LV-062" : true, "LV-063" : true, "LV-064" : true, "LV-065" : true, "LV-066" : true, + "LV-067" : true, "LV-068" : true, "LV-069" : true, "LV-070" : true, "LV-071" : true, + "LV-072" : true, "LV-073" : true, "LV-074" : true, "LV-075" : true, "LV-076" : true, + "LV-077" : true, "LV-078" : true, "LV-079" : true, "LV-080" : true, "LV-081" : true, + "LV-082" : true, "LV-083" : true, "LV-084" : true, "LV-085" : true, "LV-086" : true, + "LV-087" : true, "LV-088" : true, "LV-089" : true, "LV-090" : true, "LV-091" : true, + "LV-092" : true, "LV-093" : true, "LV-094" : true, "LV-095" : true, "LV-096" : true, + "LV-097" : true, "LV-098" : true, "LV-099" : true, "LV-100" : true, "LV-101" : true, + "LV-102" : true, "LV-103" : true, "LV-104" : true, "LV-105" : true, "LV-106" : true, + "LV-107" : true, "LV-108" : true, "LV-109" : true, "LV-110" : true, "LV-DGV" : true, + "LV-JEL" : true, "LV-JKB" : true, "LV-JUR" : true, "LV-LPX" : true, "LV-REZ" : true, + "LV-RIX" : true, "LV-VEN" : true, "LV-VMR" : true, "LY-BA" : true, "LY-BU" : true, + "LY-DR" : true, "LY-GT" : true, "LY-JA" : true, "LY-JB" : true, "LY-JG" : true, + "LY-JI" : true, "LY-JU" : true, "LY-KF" : true, "LY-MB" : true, "LY-MI" : true, + "LY-MJ" : true, "LY-MQ" : true, "LY-NL" : true, "LY-NQ" : true, "LY-SB" : true, + "LY-SR" : true, "LY-TB" : true, "LY-WA" : true, "LY-WD" : true, "LY-WS" : true, + "LY-ZA" : true, "MA-01" : true, "MA-02" : true, "MA-03" : true, "MA-04" : true, + "MA-05" : true, "MA-06" : true, "MA-07" : true, "MA-08" : true, "MA-09" : true, + "MA-10" : true, "MA-11" : true, "MA-12" : true, "MA-13" : true, "MA-14" : true, + "MA-15" : true, "MA-16" : true, "MA-AGD" : true, "MA-AOU" : true, "MA-ASZ" : true, + "MA-AZI" : true, "MA-BEM" : true, "MA-BER" : true, "MA-BES" : true, "MA-BOD" : true, + "MA-BOM" : true, "MA-CAS" : true, "MA-CHE" : true, "MA-CHI" : true, "MA-CHT" : true, + "MA-ERR" : true, "MA-ESI" : true, "MA-ESM" : true, "MA-FAH" : true, "MA-FES" : true, + "MA-FIG" : true, "MA-GUE" : true, "MA-HAJ" : true, "MA-HAO" : true, "MA-HOC" : true, + "MA-IFR" : true, "MA-INE" : true, "MA-JDI" : true, "MA-JRA" : true, "MA-KEN" : true, + "MA-KES" : true, "MA-KHE" : true, "MA-KHN" : true, "MA-KHO" : true, "MA-LAA" : true, + "MA-LAR" : true, "MA-MED" : true, "MA-MEK" : true, "MA-MMD" : true, "MA-MMN" : true, + "MA-MOH" : true, "MA-MOU" : true, "MA-NAD" : true, "MA-NOU" : true, "MA-OUA" : true, + "MA-OUD" : true, "MA-OUJ" : true, "MA-RAB" : true, "MA-SAF" : true, "MA-SAL" : true, + "MA-SEF" : true, "MA-SET" : true, "MA-SIK" : true, "MA-SKH" : true, "MA-SYB" : true, + "MA-TAI" : true, "MA-TAO" : true, "MA-TAR" : true, "MA-TAT" : true, "MA-TAZ" : true, + "MA-TET" : true, "MA-TIZ" : true, "MA-TNG" : true, "MA-TNT" : true, "MA-ZAG" : true, + "MC-CL" : true, "MC-CO" : true, "MC-FO" : true, "MC-GA" : true, "MC-JE" : true, + "MC-LA" : true, "MC-MA" : true, "MC-MC" : true, "MC-MG" : true, "MC-MO" : true, + "MC-MU" : true, "MC-PH" : true, "MC-SD" : true, "MC-SO" : true, "MC-SP" : true, + "MC-SR" : true, "MC-VR" : true, "MD-AN" : true, "MD-BA" : true, "MD-BD" : true, + "MD-BR" : true, "MD-BS" : true, "MD-CA" : true, "MD-CL" : true, "MD-CM" : true, + "MD-CR" : true, "MD-CS" : true, "MD-CT" : true, "MD-CU" : true, "MD-DO" : true, + "MD-DR" : true, "MD-DU" : true, "MD-ED" : true, "MD-FA" : true, "MD-FL" : true, + "MD-GA" : true, "MD-GL" : true, "MD-HI" : true, "MD-IA" : true, "MD-LE" : true, + "MD-NI" : true, "MD-OC" : true, "MD-OR" : true, "MD-RE" : true, "MD-RI" : true, + 
"MD-SD" : true, "MD-SI" : true, "MD-SN" : true, "MD-SO" : true, "MD-ST" : true, + "MD-SV" : true, "MD-TA" : true, "MD-TE" : true, "MD-UN" : true, "ME-01" : true, + "ME-02" : true, "ME-03" : true, "ME-04" : true, "ME-05" : true, "ME-06" : true, + "ME-07" : true, "ME-08" : true, "ME-09" : true, "ME-10" : true, "ME-11" : true, + "ME-12" : true, "ME-13" : true, "ME-14" : true, "ME-15" : true, "ME-16" : true, + "ME-17" : true, "ME-18" : true, "ME-19" : true, "ME-20" : true, "ME-21" : true, + "MG-A" : true, "MG-D" : true, "MG-F" : true, "MG-M" : true, "MG-T" : true, + "MG-U" : true, "MH-ALK" : true, "MH-ALL" : true, "MH-ARN" : true, "MH-AUR" : true, + "MH-EBO" : true, "MH-ENI" : true, "MH-JAB" : true, "MH-JAL" : true, "MH-KIL" : true, + "MH-KWA" : true, "MH-L" : true, "MH-LAE" : true, "MH-LIB" : true, "MH-LIK" : true, + "MH-MAJ" : true, "MH-MAL" : true, "MH-MEJ" : true, "MH-MIL" : true, "MH-NMK" : true, + "MH-NMU" : true, "MH-RON" : true, "MH-T" : true, "MH-UJA" : true, "MH-UTI" : true, + "MH-WTJ" : true, "MH-WTN" : true, "MK-01" : true, "MK-02" : true, "MK-03" : true, + "MK-04" : true, "MK-05" : true, "MK-06" : true, "MK-07" : true, "MK-08" : true, + "MK-09" : true, "MK-10" : true, "MK-11" : true, "MK-12" : true, "MK-13" : true, + "MK-14" : true, "MK-15" : true, "MK-16" : true, "MK-17" : true, "MK-18" : true, + "MK-19" : true, "MK-20" : true, "MK-21" : true, "MK-22" : true, "MK-23" : true, + "MK-24" : true, "MK-25" : true, "MK-26" : true, "MK-27" : true, "MK-28" : true, + "MK-29" : true, "MK-30" : true, "MK-31" : true, "MK-32" : true, "MK-33" : true, + "MK-34" : true, "MK-35" : true, "MK-36" : true, "MK-37" : true, "MK-38" : true, + "MK-39" : true, "MK-40" : true, "MK-41" : true, "MK-42" : true, "MK-43" : true, + "MK-44" : true, "MK-45" : true, "MK-46" : true, "MK-47" : true, "MK-48" : true, + "MK-49" : true, "MK-50" : true, "MK-51" : true, "MK-52" : true, "MK-53" : true, + "MK-54" : true, "MK-55" : true, "MK-56" : true, "MK-57" : true, "MK-58" : true, + "MK-59" : true, "MK-60" : true, "MK-61" : true, "MK-62" : true, "MK-63" : true, + "MK-64" : true, "MK-65" : true, "MK-66" : true, "MK-67" : true, "MK-68" : true, + "MK-69" : true, "MK-70" : true, "MK-71" : true, "MK-72" : true, "MK-73" : true, + "MK-74" : true, "MK-75" : true, "MK-76" : true, "MK-77" : true, "MK-78" : true, + "MK-79" : true, "MK-80" : true, "MK-81" : true, "MK-82" : true, "MK-83" : true, + "MK-84" : true, "ML-1" : true, "ML-2" : true, "ML-3" : true, "ML-4" : true, + "ML-5" : true, "ML-6" : true, "ML-7" : true, "ML-8" : true, "ML-BK0" : true, + "MM-01" : true, "MM-02" : true, "MM-03" : true, "MM-04" : true, "MM-05" : true, + "MM-06" : true, "MM-07" : true, "MM-11" : true, "MM-12" : true, "MM-13" : true, + "MM-14" : true, "MM-15" : true, "MM-16" : true, "MM-17" : true, "MN-035" : true, + "MN-037" : true, "MN-039" : true, "MN-041" : true, "MN-043" : true, "MN-046" : true, + "MN-047" : true, "MN-049" : true, "MN-051" : true, "MN-053" : true, "MN-055" : true, + "MN-057" : true, "MN-059" : true, "MN-061" : true, "MN-063" : true, "MN-064" : true, + "MN-065" : true, "MN-067" : true, "MN-069" : true, "MN-071" : true, "MN-073" : true, + "MN-1" : true, "MR-01" : true, "MR-02" : true, "MR-03" : true, "MR-04" : true, + "MR-05" : true, "MR-06" : true, "MR-07" : true, "MR-08" : true, "MR-09" : true, + "MR-10" : true, "MR-11" : true, "MR-12" : true, "MR-NKC" : true, "MT-01" : true, + "MT-02" : true, "MT-03" : true, "MT-04" : true, "MT-05" : true, "MT-06" : true, + "MT-07" : true, "MT-08" : true, "MT-09" : true, "MT-10" : true, "MT-11" : true, 
+ "MT-12" : true, "MT-13" : true, "MT-14" : true, "MT-15" : true, "MT-16" : true, + "MT-17" : true, "MT-18" : true, "MT-19" : true, "MT-20" : true, "MT-21" : true, + "MT-22" : true, "MT-23" : true, "MT-24" : true, "MT-25" : true, "MT-26" : true, + "MT-27" : true, "MT-28" : true, "MT-29" : true, "MT-30" : true, "MT-31" : true, + "MT-32" : true, "MT-33" : true, "MT-34" : true, "MT-35" : true, "MT-36" : true, + "MT-37" : true, "MT-38" : true, "MT-39" : true, "MT-40" : true, "MT-41" : true, + "MT-42" : true, "MT-43" : true, "MT-44" : true, "MT-45" : true, "MT-46" : true, + "MT-47" : true, "MT-48" : true, "MT-49" : true, "MT-50" : true, "MT-51" : true, + "MT-52" : true, "MT-53" : true, "MT-54" : true, "MT-55" : true, "MT-56" : true, + "MT-57" : true, "MT-58" : true, "MT-59" : true, "MT-60" : true, "MT-61" : true, + "MT-62" : true, "MT-63" : true, "MT-64" : true, "MT-65" : true, "MT-66" : true, + "MT-67" : true, "MT-68" : true, "MU-AG" : true, "MU-BL" : true, "MU-BR" : true, + "MU-CC" : true, "MU-CU" : true, "MU-FL" : true, "MU-GP" : true, "MU-MO" : true, + "MU-PA" : true, "MU-PL" : true, "MU-PU" : true, "MU-PW" : true, "MU-QB" : true, + "MU-RO" : true, "MU-RP" : true, "MU-SA" : true, "MU-VP" : true, "MV-00" : true, + "MV-01" : true, "MV-02" : true, "MV-03" : true, "MV-04" : true, "MV-05" : true, + "MV-07" : true, "MV-08" : true, "MV-12" : true, "MV-13" : true, "MV-14" : true, + "MV-17" : true, "MV-20" : true, "MV-23" : true, "MV-24" : true, "MV-25" : true, + "MV-26" : true, "MV-27" : true, "MV-28" : true, "MV-29" : true, "MV-CE" : true, + "MV-MLE" : true, "MV-NC" : true, "MV-NO" : true, "MV-SC" : true, "MV-SU" : true, + "MV-UN" : true, "MV-US" : true, "MW-BA" : true, "MW-BL" : true, "MW-C" : true, + "MW-CK" : true, "MW-CR" : true, "MW-CT" : true, "MW-DE" : true, "MW-DO" : true, + "MW-KR" : true, "MW-KS" : true, "MW-LI" : true, "MW-LK" : true, "MW-MC" : true, + "MW-MG" : true, "MW-MH" : true, "MW-MU" : true, "MW-MW" : true, "MW-MZ" : true, + "MW-N" : true, "MW-NB" : true, "MW-NE" : true, "MW-NI" : true, "MW-NK" : true, + "MW-NS" : true, "MW-NU" : true, "MW-PH" : true, "MW-RU" : true, "MW-S" : true, + "MW-SA" : true, "MW-TH" : true, "MW-ZO" : true, "MX-AGU" : true, "MX-BCN" : true, + "MX-BCS" : true, "MX-CAM" : true, "MX-CHH" : true, "MX-CHP" : true, "MX-COA" : true, + "MX-COL" : true, "MX-DIF" : true, "MX-DUR" : true, "MX-GRO" : true, "MX-GUA" : true, + "MX-HID" : true, "MX-JAL" : true, "MX-MEX" : true, "MX-MIC" : true, "MX-MOR" : true, + "MX-NAY" : true, "MX-NLE" : true, "MX-OAX" : true, "MX-PUE" : true, "MX-QUE" : true, + "MX-ROO" : true, "MX-SIN" : true, "MX-SLP" : true, "MX-SON" : true, "MX-TAB" : true, + "MX-TAM" : true, "MX-TLA" : true, "MX-VER" : true, "MX-YUC" : true, "MX-ZAC" : true, + "MY-01" : true, "MY-02" : true, "MY-03" : true, "MY-04" : true, "MY-05" : true, + "MY-06" : true, "MY-07" : true, "MY-08" : true, "MY-09" : true, "MY-10" : true, + "MY-11" : true, "MY-12" : true, "MY-13" : true, "MY-14" : true, "MY-15" : true, + "MY-16" : true, "MZ-A" : true, "MZ-B" : true, "MZ-G" : true, "MZ-I" : true, + "MZ-L" : true, "MZ-MPM" : true, "MZ-N" : true, "MZ-P" : true, "MZ-Q" : true, + "MZ-S" : true, "MZ-T" : true, "NA-CA" : true, "NA-ER" : true, "NA-HA" : true, + "NA-KA" : true, "NA-KH" : true, "NA-KU" : true, "NA-OD" : true, "NA-OH" : true, + "NA-OK" : true, "NA-ON" : true, "NA-OS" : true, "NA-OT" : true, "NA-OW" : true, + "NE-1" : true, "NE-2" : true, "NE-3" : true, "NE-4" : true, "NE-5" : true, + "NE-6" : true, "NE-7" : true, "NE-8" : true, "NG-AB" : true, "NG-AD" : true, + "NG-AK" : 
true, "NG-AN" : true, "NG-BA" : true, "NG-BE" : true, "NG-BO" : true, + "NG-BY" : true, "NG-CR" : true, "NG-DE" : true, "NG-EB" : true, "NG-ED" : true, + "NG-EK" : true, "NG-EN" : true, "NG-FC" : true, "NG-GO" : true, "NG-IM" : true, + "NG-JI" : true, "NG-KD" : true, "NG-KE" : true, "NG-KN" : true, "NG-KO" : true, + "NG-KT" : true, "NG-KW" : true, "NG-LA" : true, "NG-NA" : true, "NG-NI" : true, + "NG-OG" : true, "NG-ON" : true, "NG-OS" : true, "NG-OY" : true, "NG-PL" : true, + "NG-RI" : true, "NG-SO" : true, "NG-TA" : true, "NG-YO" : true, "NG-ZA" : true, + "NI-AN" : true, "NI-AS" : true, "NI-BO" : true, "NI-CA" : true, "NI-CI" : true, + "NI-CO" : true, "NI-ES" : true, "NI-GR" : true, "NI-JI" : true, "NI-LE" : true, + "NI-MD" : true, "NI-MN" : true, "NI-MS" : true, "NI-MT" : true, "NI-NS" : true, + "NI-RI" : true, "NI-SJ" : true, "NL-AW" : true, "NL-BQ1" : true, "NL-BQ2" : true, + "NL-BQ3" : true, "NL-CW" : true, "NL-DR" : true, "NL-FL" : true, "NL-FR" : true, + "NL-GE" : true, "NL-GR" : true, "NL-LI" : true, "NL-NB" : true, "NL-NH" : true, + "NL-OV" : true, "NL-SX" : true, "NL-UT" : true, "NL-ZE" : true, "NL-ZH" : true, + "NO-01" : true, "NO-02" : true, "NO-03" : true, "NO-04" : true, "NO-05" : true, + "NO-06" : true, "NO-07" : true, "NO-08" : true, "NO-09" : true, "NO-10" : true, + "NO-11" : true, "NO-12" : true, "NO-14" : true, "NO-15" : true, "NO-16" : true, + "NO-17" : true, "NO-18" : true, "NO-19" : true, "NO-20" : true, "NO-21" : true, + "NO-22" : true, "NP-1" : true, "NP-2" : true, "NP-3" : true, "NP-4" : true, + "NP-5" : true, "NP-BA" : true, "NP-BH" : true, "NP-DH" : true, "NP-GA" : true, + "NP-JA" : true, "NP-KA" : true, "NP-KO" : true, "NP-LU" : true, "NP-MA" : true, + "NP-ME" : true, "NP-NA" : true, "NP-RA" : true, "NP-SA" : true, "NP-SE" : true, + "NR-01" : true, "NR-02" : true, "NR-03" : true, "NR-04" : true, "NR-05" : true, + "NR-06" : true, "NR-07" : true, "NR-08" : true, "NR-09" : true, "NR-10" : true, + "NR-11" : true, "NR-12" : true, "NR-13" : true, "NR-14" : true, "NZ-AUK" : true, + "NZ-BOP" : true, "NZ-CAN" : true, "NZ-CIT" : true, "NZ-GIS" : true, "NZ-HKB" : true, + "NZ-MBH" : true, "NZ-MWT" : true, "NZ-N" : true, "NZ-NSN" : true, "NZ-NTL" : true, + "NZ-OTA" : true, "NZ-S" : true, "NZ-STL" : true, "NZ-TAS" : true, "NZ-TKI" : true, + "NZ-WGN" : true, "NZ-WKO" : true, "NZ-WTC" : true, "OM-BA" : true, "OM-BU" : true, + "OM-DA" : true, "OM-MA" : true, "OM-MU" : true, "OM-SH" : true, "OM-WU" : true, + "OM-ZA" : true, "OM-ZU" : true, "PA-1" : true, "PA-2" : true, "PA-3" : true, + "PA-4" : true, "PA-5" : true, "PA-6" : true, "PA-7" : true, "PA-8" : true, + "PA-9" : true, "PA-EM" : true, "PA-KY" : true, "PA-NB" : true, "PE-AMA" : true, + "PE-ANC" : true, "PE-APU" : true, "PE-ARE" : true, "PE-AYA" : true, "PE-CAJ" : true, + "PE-CAL" : true, "PE-CUS" : true, "PE-HUC" : true, "PE-HUV" : true, "PE-ICA" : true, + "PE-JUN" : true, "PE-LAL" : true, "PE-LAM" : true, "PE-LIM" : true, "PE-LMA" : true, + "PE-LOR" : true, "PE-MDD" : true, "PE-MOQ" : true, "PE-PAS" : true, "PE-PIU" : true, + "PE-PUN" : true, "PE-SAM" : true, "PE-TAC" : true, "PE-TUM" : true, "PE-UCA" : true, + "PG-CPK" : true, "PG-CPM" : true, "PG-EBR" : true, "PG-EHG" : true, "PG-EPW" : true, + "PG-ESW" : true, "PG-GPK" : true, "PG-MBA" : true, "PG-MPL" : true, "PG-MPM" : true, + "PG-MRL" : true, "PG-NCD" : true, "PG-NIK" : true, "PG-NPP" : true, "PG-NSB" : true, + "PG-SAN" : true, "PG-SHM" : true, "PG-WBK" : true, "PG-WHM" : true, "PG-WPD" : true, + "PH-00" : true, "PH-01" : true, "PH-02" : true, "PH-03" : true, 
"PH-05" : true, + "PH-06" : true, "PH-07" : true, "PH-08" : true, "PH-09" : true, "PH-10" : true, + "PH-11" : true, "PH-12" : true, "PH-13" : true, "PH-14" : true, "PH-15" : true, + "PH-40" : true, "PH-41" : true, "PH-ABR" : true, "PH-AGN" : true, "PH-AGS" : true, + "PH-AKL" : true, "PH-ALB" : true, "PH-ANT" : true, "PH-APA" : true, "PH-AUR" : true, + "PH-BAN" : true, "PH-BAS" : true, "PH-BEN" : true, "PH-BIL" : true, "PH-BOH" : true, + "PH-BTG" : true, "PH-BTN" : true, "PH-BUK" : true, "PH-BUL" : true, "PH-CAG" : true, + "PH-CAM" : true, "PH-CAN" : true, "PH-CAP" : true, "PH-CAS" : true, "PH-CAT" : true, + "PH-CAV" : true, "PH-CEB" : true, "PH-COM" : true, "PH-DAO" : true, "PH-DAS" : true, + "PH-DAV" : true, "PH-DIN" : true, "PH-EAS" : true, "PH-GUI" : true, "PH-IFU" : true, + "PH-ILI" : true, "PH-ILN" : true, "PH-ILS" : true, "PH-ISA" : true, "PH-KAL" : true, + "PH-LAG" : true, "PH-LAN" : true, "PH-LAS" : true, "PH-LEY" : true, "PH-LUN" : true, + "PH-MAD" : true, "PH-MAG" : true, "PH-MAS" : true, "PH-MDC" : true, "PH-MDR" : true, + "PH-MOU" : true, "PH-MSC" : true, "PH-MSR" : true, "PH-NCO" : true, "PH-NEC" : true, + "PH-NER" : true, "PH-NSA" : true, "PH-NUE" : true, "PH-NUV" : true, "PH-PAM" : true, + "PH-PAN" : true, "PH-PLW" : true, "PH-QUE" : true, "PH-QUI" : true, "PH-RIZ" : true, + "PH-ROM" : true, "PH-SAR" : true, "PH-SCO" : true, "PH-SIG" : true, "PH-SLE" : true, + "PH-SLU" : true, "PH-SOR" : true, "PH-SUK" : true, "PH-SUN" : true, "PH-SUR" : true, + "PH-TAR" : true, "PH-TAW" : true, "PH-WSA" : true, "PH-ZAN" : true, "PH-ZAS" : true, + "PH-ZMB" : true, "PH-ZSI" : true, "PK-BA" : true, "PK-GB" : true, "PK-IS" : true, + "PK-JK" : true, "PK-KP" : true, "PK-PB" : true, "PK-SD" : true, "PK-TA" : true, + "PL-DS" : true, "PL-KP" : true, "PL-LB" : true, "PL-LD" : true, "PL-LU" : true, + "PL-MA" : true, "PL-MZ" : true, "PL-OP" : true, "PL-PD" : true, "PL-PK" : true, + "PL-PM" : true, "PL-SK" : true, "PL-SL" : true, "PL-WN" : true, "PL-WP" : true, + "PL-ZP" : true, "PS-BTH" : true, "PS-DEB" : true, "PS-GZA" : true, "PS-HBN" : true, + "PS-JEM" : true, "PS-JEN" : true, "PS-JRH" : true, "PS-KYS" : true, "PS-NBS" : true, + "PS-NGZ" : true, "PS-QQA" : true, "PS-RBH" : true, "PS-RFH" : true, "PS-SLT" : true, + "PS-TBS" : true, "PS-TKM" : true, "PT-01" : true, "PT-02" : true, "PT-03" : true, + "PT-04" : true, "PT-05" : true, "PT-06" : true, "PT-07" : true, "PT-08" : true, + "PT-09" : true, "PT-10" : true, "PT-11" : true, "PT-12" : true, "PT-13" : true, + "PT-14" : true, "PT-15" : true, "PT-16" : true, "PT-17" : true, "PT-18" : true, + "PT-20" : true, "PT-30" : true, "PW-002" : true, "PW-004" : true, "PW-010" : true, + "PW-050" : true, "PW-100" : true, "PW-150" : true, "PW-212" : true, "PW-214" : true, + "PW-218" : true, "PW-222" : true, "PW-224" : true, "PW-226" : true, "PW-227" : true, + "PW-228" : true, "PW-350" : true, "PW-370" : true, "PY-1" : true, "PY-10" : true, + "PY-11" : true, "PY-12" : true, "PY-13" : true, "PY-14" : true, "PY-15" : true, + "PY-16" : true, "PY-19" : true, "PY-2" : true, "PY-3" : true, "PY-4" : true, + "PY-5" : true, "PY-6" : true, "PY-7" : true, "PY-8" : true, "PY-9" : true, + "PY-ASU" : true, "QA-DA" : true, "QA-KH" : true, "QA-MS" : true, "QA-RA" : true, + "QA-US" : true, "QA-WA" : true, "QA-ZA" : true, "RO-AB" : true, "RO-AG" : true, + "RO-AR" : true, "RO-B" : true, "RO-BC" : true, "RO-BH" : true, "RO-BN" : true, + "RO-BR" : true, "RO-BT" : true, "RO-BV" : true, "RO-BZ" : true, "RO-CJ" : true, + "RO-CL" : true, "RO-CS" : true, "RO-CT" : true, "RO-CV" : true, "RO-DB" 
: true, + "RO-DJ" : true, "RO-GJ" : true, "RO-GL" : true, "RO-GR" : true, "RO-HD" : true, + "RO-HR" : true, "RO-IF" : true, "RO-IL" : true, "RO-IS" : true, "RO-MH" : true, + "RO-MM" : true, "RO-MS" : true, "RO-NT" : true, "RO-OT" : true, "RO-PH" : true, + "RO-SB" : true, "RO-SJ" : true, "RO-SM" : true, "RO-SV" : true, "RO-TL" : true, + "RO-TM" : true, "RO-TR" : true, "RO-VL" : true, "RO-VN" : true, "RO-VS" : true, + "RS-00" : true, "RS-01" : true, "RS-02" : true, "RS-03" : true, "RS-04" : true, + "RS-05" : true, "RS-06" : true, "RS-07" : true, "RS-08" : true, "RS-09" : true, + "RS-10" : true, "RS-11" : true, "RS-12" : true, "RS-13" : true, "RS-14" : true, + "RS-15" : true, "RS-16" : true, "RS-17" : true, "RS-18" : true, "RS-19" : true, + "RS-20" : true, "RS-21" : true, "RS-22" : true, "RS-23" : true, "RS-24" : true, + "RS-25" : true, "RS-26" : true, "RS-27" : true, "RS-28" : true, "RS-29" : true, + "RS-KM" : true, "RS-VO" : true, "RU-AD" : true, "RU-AL" : true, "RU-ALT" : true, + "RU-AMU" : true, "RU-ARK" : true, "RU-AST" : true, "RU-BA" : true, "RU-BEL" : true, + "RU-BRY" : true, "RU-BU" : true, "RU-CE" : true, "RU-CHE" : true, "RU-CHU" : true, + "RU-CU" : true, "RU-DA" : true, "RU-IN" : true, "RU-IRK" : true, "RU-IVA" : true, + "RU-KAM" : true, "RU-KB" : true, "RU-KC" : true, "RU-KDA" : true, "RU-KEM" : true, + "RU-KGD" : true, "RU-KGN" : true, "RU-KHA" : true, "RU-KHM" : true, "RU-KIR" : true, + "RU-KK" : true, "RU-KL" : true, "RU-KLU" : true, "RU-KO" : true, "RU-KOS" : true, + "RU-KR" : true, "RU-KRS" : true, "RU-KYA" : true, "RU-LEN" : true, "RU-LIP" : true, + "RU-MAG" : true, "RU-ME" : true, "RU-MO" : true, "RU-MOS" : true, "RU-MOW" : true, + "RU-MUR" : true, "RU-NEN" : true, "RU-NGR" : true, "RU-NIZ" : true, "RU-NVS" : true, + "RU-OMS" : true, "RU-ORE" : true, "RU-ORL" : true, "RU-PER" : true, "RU-PNZ" : true, + "RU-PRI" : true, "RU-PSK" : true, "RU-ROS" : true, "RU-RYA" : true, "RU-SA" : true, + "RU-SAK" : true, "RU-SAM" : true, "RU-SAR" : true, "RU-SE" : true, "RU-SMO" : true, + "RU-SPE" : true, "RU-STA" : true, "RU-SVE" : true, "RU-TA" : true, "RU-TAM" : true, + "RU-TOM" : true, "RU-TUL" : true, "RU-TVE" : true, "RU-TY" : true, "RU-TYU" : true, + "RU-UD" : true, "RU-ULY" : true, "RU-VGG" : true, "RU-VLA" : true, "RU-VLG" : true, + "RU-VOR" : true, "RU-YAN" : true, "RU-YAR" : true, "RU-YEV" : true, "RU-ZAB" : true, + "RW-01" : true, "RW-02" : true, "RW-03" : true, "RW-04" : true, "RW-05" : true, + "SA-01" : true, "SA-02" : true, "SA-03" : true, "SA-04" : true, "SA-05" : true, + "SA-06" : true, "SA-07" : true, "SA-08" : true, "SA-09" : true, "SA-10" : true, + "SA-11" : true, "SA-12" : true, "SA-14" : true, "SB-CE" : true, "SB-CH" : true, + "SB-CT" : true, "SB-GU" : true, "SB-IS" : true, "SB-MK" : true, "SB-ML" : true, + "SB-RB" : true, "SB-TE" : true, "SB-WE" : true, "SC-01" : true, "SC-02" : true, + "SC-03" : true, "SC-04" : true, "SC-05" : true, "SC-06" : true, "SC-07" : true, + "SC-08" : true, "SC-09" : true, "SC-10" : true, "SC-11" : true, "SC-12" : true, + "SC-13" : true, "SC-14" : true, "SC-15" : true, "SC-16" : true, "SC-17" : true, + "SC-18" : true, "SC-19" : true, "SC-20" : true, "SC-21" : true, "SC-22" : true, + "SC-23" : true, "SC-24" : true, "SC-25" : true, "SD-DC" : true, "SD-DE" : true, + "SD-DN" : true, "SD-DS" : true, "SD-DW" : true, "SD-GD" : true, "SD-GZ" : true, + "SD-KA" : true, "SD-KH" : true, "SD-KN" : true, "SD-KS" : true, "SD-NB" : true, + "SD-NO" : true, "SD-NR" : true, "SD-NW" : true, "SD-RS" : true, "SD-SI" : true, + "SE-AB" : true, "SE-AC" : true, 
"SE-BD" : true, "SE-C" : true, "SE-D" : true, + "SE-E" : true, "SE-F" : true, "SE-G" : true, "SE-H" : true, "SE-I" : true, + "SE-K" : true, "SE-M" : true, "SE-N" : true, "SE-O" : true, "SE-S" : true, + "SE-T" : true, "SE-U" : true, "SE-W" : true, "SE-X" : true, "SE-Y" : true, + "SE-Z" : true, "SG-01" : true, "SG-02" : true, "SG-03" : true, "SG-04" : true, + "SG-05" : true, "SH-AC" : true, "SH-HL" : true, "SH-TA" : true, "SI-001" : true, + "SI-002" : true, "SI-003" : true, "SI-004" : true, "SI-005" : true, "SI-006" : true, + "SI-007" : true, "SI-008" : true, "SI-009" : true, "SI-010" : true, "SI-011" : true, + "SI-012" : true, "SI-013" : true, "SI-014" : true, "SI-015" : true, "SI-016" : true, + "SI-017" : true, "SI-018" : true, "SI-019" : true, "SI-020" : true, "SI-021" : true, + "SI-022" : true, "SI-023" : true, "SI-024" : true, "SI-025" : true, "SI-026" : true, + "SI-027" : true, "SI-028" : true, "SI-029" : true, "SI-030" : true, "SI-031" : true, + "SI-032" : true, "SI-033" : true, "SI-034" : true, "SI-035" : true, "SI-036" : true, + "SI-037" : true, "SI-038" : true, "SI-039" : true, "SI-040" : true, "SI-041" : true, + "SI-042" : true, "SI-043" : true, "SI-044" : true, "SI-045" : true, "SI-046" : true, + "SI-047" : true, "SI-048" : true, "SI-049" : true, "SI-050" : true, "SI-051" : true, + "SI-052" : true, "SI-053" : true, "SI-054" : true, "SI-055" : true, "SI-056" : true, + "SI-057" : true, "SI-058" : true, "SI-059" : true, "SI-060" : true, "SI-061" : true, + "SI-062" : true, "SI-063" : true, "SI-064" : true, "SI-065" : true, "SI-066" : true, + "SI-067" : true, "SI-068" : true, "SI-069" : true, "SI-070" : true, "SI-071" : true, + "SI-072" : true, "SI-073" : true, "SI-074" : true, "SI-075" : true, "SI-076" : true, + "SI-077" : true, "SI-078" : true, "SI-079" : true, "SI-080" : true, "SI-081" : true, + "SI-082" : true, "SI-083" : true, "SI-084" : true, "SI-085" : true, "SI-086" : true, + "SI-087" : true, "SI-088" : true, "SI-089" : true, "SI-090" : true, "SI-091" : true, + "SI-092" : true, "SI-093" : true, "SI-094" : true, "SI-095" : true, "SI-096" : true, + "SI-097" : true, "SI-098" : true, "SI-099" : true, "SI-100" : true, "SI-101" : true, + "SI-102" : true, "SI-103" : true, "SI-104" : true, "SI-105" : true, "SI-106" : true, + "SI-107" : true, "SI-108" : true, "SI-109" : true, "SI-110" : true, "SI-111" : true, + "SI-112" : true, "SI-113" : true, "SI-114" : true, "SI-115" : true, "SI-116" : true, + "SI-117" : true, "SI-118" : true, "SI-119" : true, "SI-120" : true, "SI-121" : true, + "SI-122" : true, "SI-123" : true, "SI-124" : true, "SI-125" : true, "SI-126" : true, + "SI-127" : true, "SI-128" : true, "SI-129" : true, "SI-130" : true, "SI-131" : true, + "SI-132" : true, "SI-133" : true, "SI-134" : true, "SI-135" : true, "SI-136" : true, + "SI-137" : true, "SI-138" : true, "SI-139" : true, "SI-140" : true, "SI-141" : true, + "SI-142" : true, "SI-143" : true, "SI-144" : true, "SI-146" : true, "SI-147" : true, + "SI-148" : true, "SI-149" : true, "SI-150" : true, "SI-151" : true, "SI-152" : true, + "SI-153" : true, "SI-154" : true, "SI-155" : true, "SI-156" : true, "SI-157" : true, + "SI-158" : true, "SI-159" : true, "SI-160" : true, "SI-161" : true, "SI-162" : true, + "SI-163" : true, "SI-164" : true, "SI-165" : true, "SI-166" : true, "SI-167" : true, + "SI-168" : true, "SI-169" : true, "SI-170" : true, "SI-171" : true, "SI-172" : true, + "SI-173" : true, "SI-174" : true, "SI-175" : true, "SI-176" : true, "SI-177" : true, + "SI-178" : true, "SI-179" : true, "SI-180" : true, "SI-181" : 
true, "SI-182" : true, + "SI-183" : true, "SI-184" : true, "SI-185" : true, "SI-186" : true, "SI-187" : true, + "SI-188" : true, "SI-189" : true, "SI-190" : true, "SI-191" : true, "SI-192" : true, + "SI-193" : true, "SI-194" : true, "SI-195" : true, "SI-196" : true, "SI-197" : true, + "SI-198" : true, "SI-199" : true, "SI-200" : true, "SI-201" : true, "SI-202" : true, + "SI-203" : true, "SI-204" : true, "SI-205" : true, "SI-206" : true, "SI-207" : true, + "SI-208" : true, "SI-209" : true, "SI-210" : true, "SI-211" : true, "SK-BC" : true, + "SK-BL" : true, "SK-KI" : true, "SK-NI" : true, "SK-PV" : true, "SK-TA" : true, + "SK-TC" : true, "SK-ZI" : true, "SL-E" : true, "SL-N" : true, "SL-S" : true, + "SL-W" : true, "SM-01" : true, "SM-02" : true, "SM-03" : true, "SM-04" : true, + "SM-05" : true, "SM-06" : true, "SM-07" : true, "SM-08" : true, "SM-09" : true, + "SN-DB" : true, "SN-DK" : true, "SN-FK" : true, "SN-KA" : true, "SN-KD" : true, + "SN-KE" : true, "SN-KL" : true, "SN-LG" : true, "SN-MT" : true, "SN-SE" : true, + "SN-SL" : true, "SN-TC" : true, "SN-TH" : true, "SN-ZG" : true, "SO-AW" : true, + "SO-BK" : true, "SO-BN" : true, "SO-BR" : true, "SO-BY" : true, "SO-GA" : true, + "SO-GE" : true, "SO-HI" : true, "SO-JD" : true, "SO-JH" : true, "SO-MU" : true, + "SO-NU" : true, "SO-SA" : true, "SO-SD" : true, "SO-SH" : true, "SO-SO" : true, + "SO-TO" : true, "SO-WO" : true, "SR-BR" : true, "SR-CM" : true, "SR-CR" : true, + "SR-MA" : true, "SR-NI" : true, "SR-PM" : true, "SR-PR" : true, "SR-SA" : true, + "SR-SI" : true, "SR-WA" : true, "SS-BN" : true, "SS-BW" : true, "SS-EC" : true, + "SS-EE8" : true, "SS-EW" : true, "SS-JG" : true, "SS-LK" : true, "SS-NU" : true, + "SS-UY" : true, "SS-WR" : true, "ST-P" : true, "ST-S" : true, "SV-AH" : true, + "SV-CA" : true, "SV-CH" : true, "SV-CU" : true, "SV-LI" : true, "SV-MO" : true, + "SV-PA" : true, "SV-SA" : true, "SV-SM" : true, "SV-SO" : true, "SV-SS" : true, + "SV-SV" : true, "SV-UN" : true, "SV-US" : true, "SY-DI" : true, "SY-DR" : true, + "SY-DY" : true, "SY-HA" : true, "SY-HI" : true, "SY-HL" : true, "SY-HM" : true, + "SY-ID" : true, "SY-LA" : true, "SY-QU" : true, "SY-RA" : true, "SY-RD" : true, + "SY-SU" : true, "SY-TA" : true, "SZ-HH" : true, "SZ-LU" : true, "SZ-MA" : true, + "SZ-SH" : true, "TD-BA" : true, "TD-BG" : true, "TD-BO" : true, "TD-CB" : true, + "TD-EN" : true, "TD-GR" : true, "TD-HL" : true, "TD-KA" : true, "TD-LC" : true, + "TD-LO" : true, "TD-LR" : true, "TD-MA" : true, "TD-MC" : true, "TD-ME" : true, + "TD-MO" : true, "TD-ND" : true, "TD-OD" : true, "TD-SA" : true, "TD-SI" : true, + "TD-TA" : true, "TD-TI" : true, "TD-WF" : true, "TG-C" : true, "TG-K" : true, + "TG-M" : true, "TG-P" : true, "TG-S" : true, "TH-10" : true, "TH-11" : true, + "TH-12" : true, "TH-13" : true, "TH-14" : true, "TH-15" : true, "TH-16" : true, + "TH-17" : true, "TH-18" : true, "TH-19" : true, "TH-20" : true, "TH-21" : true, + "TH-22" : true, "TH-23" : true, "TH-24" : true, "TH-25" : true, "TH-26" : true, + "TH-27" : true, "TH-30" : true, "TH-31" : true, "TH-32" : true, "TH-33" : true, + "TH-34" : true, "TH-35" : true, "TH-36" : true, "TH-37" : true, "TH-39" : true, + "TH-40" : true, "TH-41" : true, "TH-42" : true, "TH-43" : true, "TH-44" : true, + "TH-45" : true, "TH-46" : true, "TH-47" : true, "TH-48" : true, "TH-49" : true, + "TH-50" : true, "TH-51" : true, "TH-52" : true, "TH-53" : true, "TH-54" : true, + "TH-55" : true, "TH-56" : true, "TH-57" : true, "TH-58" : true, "TH-60" : true, + "TH-61" : true, "TH-62" : true, "TH-63" : true, "TH-64" : true, 
"TH-65" : true, + "TH-66" : true, "TH-67" : true, "TH-70" : true, "TH-71" : true, "TH-72" : true, + "TH-73" : true, "TH-74" : true, "TH-75" : true, "TH-76" : true, "TH-77" : true, + "TH-80" : true, "TH-81" : true, "TH-82" : true, "TH-83" : true, "TH-84" : true, + "TH-85" : true, "TH-86" : true, "TH-90" : true, "TH-91" : true, "TH-92" : true, + "TH-93" : true, "TH-94" : true, "TH-95" : true, "TH-96" : true, "TH-S" : true, + "TJ-GB" : true, "TJ-KT" : true, "TJ-SU" : true, "TL-AL" : true, "TL-AN" : true, + "TL-BA" : true, "TL-BO" : true, "TL-CO" : true, "TL-DI" : true, "TL-ER" : true, + "TL-LA" : true, "TL-LI" : true, "TL-MF" : true, "TL-MT" : true, "TL-OE" : true, + "TL-VI" : true, "TM-A" : true, "TM-B" : true, "TM-D" : true, "TM-L" : true, + "TM-M" : true, "TM-S" : true, "TN-11" : true, "TN-12" : true, "TN-13" : true, + "TN-14" : true, "TN-21" : true, "TN-22" : true, "TN-23" : true, "TN-31" : true, + "TN-32" : true, "TN-33" : true, "TN-34" : true, "TN-41" : true, "TN-42" : true, + "TN-43" : true, "TN-51" : true, "TN-52" : true, "TN-53" : true, "TN-61" : true, + "TN-71" : true, "TN-72" : true, "TN-73" : true, "TN-81" : true, "TN-82" : true, + "TN-83" : true, "TO-01" : true, "TO-02" : true, "TO-03" : true, "TO-04" : true, + "TO-05" : true, "TR-01" : true, "TR-02" : true, "TR-03" : true, "TR-04" : true, + "TR-05" : true, "TR-06" : true, "TR-07" : true, "TR-08" : true, "TR-09" : true, + "TR-10" : true, "TR-11" : true, "TR-12" : true, "TR-13" : true, "TR-14" : true, + "TR-15" : true, "TR-16" : true, "TR-17" : true, "TR-18" : true, "TR-19" : true, + "TR-20" : true, "TR-21" : true, "TR-22" : true, "TR-23" : true, "TR-24" : true, + "TR-25" : true, "TR-26" : true, "TR-27" : true, "TR-28" : true, "TR-29" : true, + "TR-30" : true, "TR-31" : true, "TR-32" : true, "TR-33" : true, "TR-34" : true, + "TR-35" : true, "TR-36" : true, "TR-37" : true, "TR-38" : true, "TR-39" : true, + "TR-40" : true, "TR-41" : true, "TR-42" : true, "TR-43" : true, "TR-44" : true, + "TR-45" : true, "TR-46" : true, "TR-47" : true, "TR-48" : true, "TR-49" : true, + "TR-50" : true, "TR-51" : true, "TR-52" : true, "TR-53" : true, "TR-54" : true, + "TR-55" : true, "TR-56" : true, "TR-57" : true, "TR-58" : true, "TR-59" : true, + "TR-60" : true, "TR-61" : true, "TR-62" : true, "TR-63" : true, "TR-64" : true, + "TR-65" : true, "TR-66" : true, "TR-67" : true, "TR-68" : true, "TR-69" : true, + "TR-70" : true, "TR-71" : true, "TR-72" : true, "TR-73" : true, "TR-74" : true, + "TR-75" : true, "TR-76" : true, "TR-77" : true, "TR-78" : true, "TR-79" : true, + "TR-80" : true, "TR-81" : true, "TT-ARI" : true, "TT-CHA" : true, "TT-CTT" : true, + "TT-DMN" : true, "TT-ETO" : true, "TT-PED" : true, "TT-POS" : true, "TT-PRT" : true, + "TT-PTF" : true, "TT-RCM" : true, "TT-SFO" : true, "TT-SGE" : true, "TT-SIP" : true, + "TT-SJL" : true, "TT-TUP" : true, "TT-WTO" : true, "TV-FUN" : true, "TV-NIT" : true, + "TV-NKF" : true, "TV-NKL" : true, "TV-NMA" : true, "TV-NMG" : true, "TV-NUI" : true, + "TV-VAI" : true, "TW-CHA" : true, "TW-CYI" : true, "TW-CYQ" : true, "TW-HSQ" : true, + "TW-HSZ" : true, "TW-HUA" : true, "TW-ILA" : true, "TW-KEE" : true, "TW-KHH" : true, + "TW-KHQ" : true, "TW-MIA" : true, "TW-NAN" : true, "TW-PEN" : true, "TW-PIF" : true, + "TW-TAO" : true, "TW-TNN" : true, "TW-TNQ" : true, "TW-TPE" : true, "TW-TPQ" : true, + "TW-TTT" : true, "TW-TXG" : true, "TW-TXQ" : true, "TW-YUN" : true, "TZ-01" : true, + "TZ-02" : true, "TZ-03" : true, "TZ-04" : true, "TZ-05" : true, "TZ-06" : true, + "TZ-07" : true, "TZ-08" : true, "TZ-09" : true, 
"TZ-10" : true, "TZ-11" : true, + "TZ-12" : true, "TZ-13" : true, "TZ-14" : true, "TZ-15" : true, "TZ-16" : true, + "TZ-17" : true, "TZ-18" : true, "TZ-19" : true, "TZ-20" : true, "TZ-21" : true, + "TZ-22" : true, "TZ-23" : true, "TZ-24" : true, "TZ-25" : true, "TZ-26" : true, + "UA-05" : true, "UA-07" : true, "UA-09" : true, "UA-12" : true, "UA-14" : true, + "UA-18" : true, "UA-21" : true, "UA-23" : true, "UA-26" : true, "UA-30" : true, + "UA-32" : true, "UA-35" : true, "UA-40" : true, "UA-43" : true, "UA-46" : true, + "UA-48" : true, "UA-51" : true, "UA-53" : true, "UA-56" : true, "UA-59" : true, + "UA-61" : true, "UA-63" : true, "UA-65" : true, "UA-68" : true, "UA-71" : true, + "UA-74" : true, "UA-77" : true, "UG-101" : true, "UG-102" : true, "UG-103" : true, + "UG-104" : true, "UG-105" : true, "UG-106" : true, "UG-107" : true, "UG-108" : true, + "UG-109" : true, "UG-110" : true, "UG-111" : true, "UG-112" : true, "UG-113" : true, + "UG-114" : true, "UG-115" : true, "UG-116" : true, "UG-201" : true, "UG-202" : true, + "UG-203" : true, "UG-204" : true, "UG-205" : true, "UG-206" : true, "UG-207" : true, + "UG-208" : true, "UG-209" : true, "UG-210" : true, "UG-211" : true, "UG-212" : true, + "UG-213" : true, "UG-214" : true, "UG-215" : true, "UG-216" : true, "UG-217" : true, + "UG-218" : true, "UG-219" : true, "UG-220" : true, "UG-221" : true, "UG-222" : true, + "UG-223" : true, "UG-224" : true, "UG-301" : true, "UG-302" : true, "UG-303" : true, + "UG-304" : true, "UG-305" : true, "UG-306" : true, "UG-307" : true, "UG-308" : true, + "UG-309" : true, "UG-310" : true, "UG-311" : true, "UG-312" : true, "UG-313" : true, + "UG-314" : true, "UG-315" : true, "UG-316" : true, "UG-317" : true, "UG-318" : true, + "UG-319" : true, "UG-320" : true, "UG-321" : true, "UG-401" : true, "UG-402" : true, + "UG-403" : true, "UG-404" : true, "UG-405" : true, "UG-406" : true, "UG-407" : true, + "UG-408" : true, "UG-409" : true, "UG-410" : true, "UG-411" : true, "UG-412" : true, + "UG-413" : true, "UG-414" : true, "UG-415" : true, "UG-416" : true, "UG-417" : true, + "UG-418" : true, "UG-419" : true, "UG-C" : true, "UG-E" : true, "UG-N" : true, + "UG-W" : true, "UM-67" : true, "UM-71" : true, "UM-76" : true, "UM-79" : true, + "UM-81" : true, "UM-84" : true, "UM-86" : true, "UM-89" : true, "UM-95" : true, + "US-AK" : true, "US-AL" : true, "US-AR" : true, "US-AS" : true, "US-AZ" : true, + "US-CA" : true, "US-CO" : true, "US-CT" : true, "US-DC" : true, "US-DE" : true, + "US-FL" : true, "US-GA" : true, "US-GU" : true, "US-HI" : true, "US-IA" : true, + "US-ID" : true, "US-IL" : true, "US-IN" : true, "US-KS" : true, "US-KY" : true, + "US-LA" : true, "US-MA" : true, "US-MD" : true, "US-ME" : true, "US-MI" : true, + "US-MN" : true, "US-MO" : true, "US-MP" : true, "US-MS" : true, "US-MT" : true, + "US-NC" : true, "US-ND" : true, "US-NE" : true, "US-NH" : true, "US-NJ" : true, + "US-NM" : true, "US-NV" : true, "US-NY" : true, "US-OH" : true, "US-OK" : true, + "US-OR" : true, "US-PA" : true, "US-PR" : true, "US-RI" : true, "US-SC" : true, + "US-SD" : true, "US-TN" : true, "US-TX" : true, "US-UM" : true, "US-UT" : true, + "US-VA" : true, "US-VI" : true, "US-VT" : true, "US-WA" : true, "US-WI" : true, + "US-WV" : true, "US-WY" : true, "UY-AR" : true, "UY-CA" : true, "UY-CL" : true, + "UY-CO" : true, "UY-DU" : true, "UY-FD" : true, "UY-FS" : true, "UY-LA" : true, + "UY-MA" : true, "UY-MO" : true, "UY-PA" : true, "UY-RN" : true, "UY-RO" : true, + "UY-RV" : true, "UY-SA" : true, "UY-SJ" : true, "UY-SO" : true, "UY-TA" : true, + 
"UY-TT" : true, "UZ-AN" : true, "UZ-BU" : true, "UZ-FA" : true, "UZ-JI" : true, + "UZ-NG" : true, "UZ-NW" : true, "UZ-QA" : true, "UZ-QR" : true, "UZ-SA" : true, + "UZ-SI" : true, "UZ-SU" : true, "UZ-TK" : true, "UZ-TO" : true, "UZ-XO" : true, + "VC-01" : true, "VC-02" : true, "VC-03" : true, "VC-04" : true, "VC-05" : true, + "VC-06" : true, "VE-A" : true, "VE-B" : true, "VE-C" : true, "VE-D" : true, + "VE-E" : true, "VE-F" : true, "VE-G" : true, "VE-H" : true, "VE-I" : true, + "VE-J" : true, "VE-K" : true, "VE-L" : true, "VE-M" : true, "VE-N" : true, + "VE-O" : true, "VE-P" : true, "VE-R" : true, "VE-S" : true, "VE-T" : true, + "VE-U" : true, "VE-V" : true, "VE-W" : true, "VE-X" : true, "VE-Y" : true, + "VE-Z" : true, "VN-01" : true, "VN-02" : true, "VN-03" : true, "VN-04" : true, + "VN-05" : true, "VN-06" : true, "VN-07" : true, "VN-09" : true, "VN-13" : true, + "VN-14" : true, "VN-15" : true, "VN-18" : true, "VN-20" : true, "VN-21" : true, + "VN-22" : true, "VN-23" : true, "VN-24" : true, "VN-25" : true, "VN-26" : true, + "VN-27" : true, "VN-28" : true, "VN-29" : true, "VN-30" : true, "VN-31" : true, + "VN-32" : true, "VN-33" : true, "VN-34" : true, "VN-35" : true, "VN-36" : true, + "VN-37" : true, "VN-39" : true, "VN-40" : true, "VN-41" : true, "VN-43" : true, + "VN-44" : true, "VN-45" : true, "VN-46" : true, "VN-47" : true, "VN-49" : true, + "VN-50" : true, "VN-51" : true, "VN-52" : true, "VN-53" : true, "VN-54" : true, + "VN-55" : true, "VN-56" : true, "VN-57" : true, "VN-58" : true, "VN-59" : true, + "VN-61" : true, "VN-63" : true, "VN-66" : true, "VN-67" : true, "VN-68" : true, + "VN-69" : true, "VN-70" : true, "VN-71" : true, "VN-72" : true, "VN-73" : true, + "VN-CT" : true, "VN-DN" : true, "VN-HN" : true, "VN-HP" : true, "VN-SG" : true, + "VU-MAP" : true, "VU-PAM" : true, "VU-SAM" : true, "VU-SEE" : true, "VU-TAE" : true, + "VU-TOB" : true, "WS-AA" : true, "WS-AL" : true, "WS-AT" : true, "WS-FA" : true, + "WS-GE" : true, "WS-GI" : true, "WS-PA" : true, "WS-SA" : true, "WS-TU" : true, + "WS-VF" : true, "WS-VS" : true, "YE-AB" : true, "YE-AD" : true, "YE-AM" : true, + "YE-BA" : true, "YE-DA" : true, "YE-DH" : true, "YE-HD" : true, "YE-HJ" : true, + "YE-IB" : true, "YE-JA" : true, "YE-LA" : true, "YE-MA" : true, "YE-MR" : true, + "YE-MU" : true, "YE-MW" : true, "YE-RA" : true, "YE-SD" : true, "YE-SH" : true, + "YE-SN" : true, "YE-TA" : true, "ZA-EC" : true, "ZA-FS" : true, "ZA-GP" : true, + "ZA-LP" : true, "ZA-MP" : true, "ZA-NC" : true, "ZA-NW" : true, "ZA-WC" : true, + "ZA-ZN" : true, "ZM-01" : true, "ZM-02" : true, "ZM-03" : true, "ZM-04" : true, + "ZM-05" : true, "ZM-06" : true, "ZM-07" : true, "ZM-08" : true, "ZM-09" : true, + "ZW-BU" : true, "ZW-HA" : true, "ZW-MA" : true, "ZW-MC" : true, "ZW-ME" : true, + "ZW-MI" : true, "ZW-MN" : true, "ZW-MS" : true, "ZW-MV" : true, "ZW-MW" : true, +} diff --git a/index/server/vendor/github.com/go-playground/validator/v10/currency_codes.go b/index/server/vendor/github.com/go-playground/validator/v10/currency_codes.go new file mode 100644 index 00000000..a5cd9b18 --- /dev/null +++ b/index/server/vendor/github.com/go-playground/validator/v10/currency_codes.go @@ -0,0 +1,79 @@ +package validator + +var iso4217 = map[string]bool{ + "AFN": true, "EUR": true, "ALL": true, "DZD": true, "USD": true, + "AOA": true, "XCD": true, "ARS": true, "AMD": true, "AWG": true, + "AUD": true, "AZN": true, "BSD": true, "BHD": true, "BDT": true, + "BBD": true, "BYN": true, "BZD": true, "XOF": true, "BMD": true, + "INR": true, "BTN": true, 
"BOB": true, "BOV": true, "BAM": true, + "BWP": true, "NOK": true, "BRL": true, "BND": true, "BGN": true, + "BIF": true, "CVE": true, "KHR": true, "XAF": true, "CAD": true, + "KYD": true, "CLP": true, "CLF": true, "CNY": true, "COP": true, + "COU": true, "KMF": true, "CDF": true, "NZD": true, "CRC": true, + "HRK": true, "CUP": true, "CUC": true, "ANG": true, "CZK": true, + "DKK": true, "DJF": true, "DOP": true, "EGP": true, "SVC": true, + "ERN": true, "SZL": true, "ETB": true, "FKP": true, "FJD": true, + "XPF": true, "GMD": true, "GEL": true, "GHS": true, "GIP": true, + "GTQ": true, "GBP": true, "GNF": true, "GYD": true, "HTG": true, + "HNL": true, "HKD": true, "HUF": true, "ISK": true, "IDR": true, + "XDR": true, "IRR": true, "IQD": true, "ILS": true, "JMD": true, + "JPY": true, "JOD": true, "KZT": true, "KES": true, "KPW": true, + "KRW": true, "KWD": true, "KGS": true, "LAK": true, "LBP": true, + "LSL": true, "ZAR": true, "LRD": true, "LYD": true, "CHF": true, + "MOP": true, "MKD": true, "MGA": true, "MWK": true, "MYR": true, + "MVR": true, "MRU": true, "MUR": true, "XUA": true, "MXN": true, + "MXV": true, "MDL": true, "MNT": true, "MAD": true, "MZN": true, + "MMK": true, "NAD": true, "NPR": true, "NIO": true, "NGN": true, + "OMR": true, "PKR": true, "PAB": true, "PGK": true, "PYG": true, + "PEN": true, "PHP": true, "PLN": true, "QAR": true, "RON": true, + "RUB": true, "RWF": true, "SHP": true, "WST": true, "STN": true, + "SAR": true, "RSD": true, "SCR": true, "SLL": true, "SGD": true, + "XSU": true, "SBD": true, "SOS": true, "SSP": true, "LKR": true, + "SDG": true, "SRD": true, "SEK": true, "CHE": true, "CHW": true, + "SYP": true, "TWD": true, "TJS": true, "TZS": true, "THB": true, + "TOP": true, "TTD": true, "TND": true, "TRY": true, "TMT": true, + "UGX": true, "UAH": true, "AED": true, "USN": true, "UYU": true, + "UYI": true, "UYW": true, "UZS": true, "VUV": true, "VES": true, + "VND": true, "YER": true, "ZMW": true, "ZWL": true, "XBA": true, + "XBB": true, "XBC": true, "XBD": true, "XTS": true, "XXX": true, + "XAU": true, "XPD": true, "XPT": true, "XAG": true, +} + +var iso4217_numeric = map[int]bool{ + 8: true, 12: true, 32: true, 36: true, 44: true, + 48: true, 50: true, 51: true, 52: true, 60: true, + 64: true, 68: true, 72: true, 84: true, 90: true, + 96: true, 104: true, 108: true, 116: true, 124: true, + 132: true, 136: true, 144: true, 152: true, 156: true, + 170: true, 174: true, 188: true, 191: true, 192: true, + 203: true, 208: true, 214: true, 222: true, 230: true, + 232: true, 238: true, 242: true, 262: true, 270: true, + 292: true, 320: true, 324: true, 328: true, 332: true, + 340: true, 344: true, 348: true, 352: true, 356: true, + 360: true, 364: true, 368: true, 376: true, 388: true, + 392: true, 398: true, 400: true, 404: true, 408: true, + 410: true, 414: true, 417: true, 418: true, 422: true, + 426: true, 430: true, 434: true, 446: true, 454: true, + 458: true, 462: true, 480: true, 484: true, 496: true, + 498: true, 504: true, 512: true, 516: true, 524: true, + 532: true, 533: true, 548: true, 554: true, 558: true, + 566: true, 578: true, 586: true, 590: true, 598: true, + 600: true, 604: true, 608: true, 634: true, 643: true, + 646: true, 654: true, 682: true, 690: true, 694: true, + 702: true, 704: true, 706: true, 710: true, 728: true, + 748: true, 752: true, 756: true, 760: true, 764: true, + 776: true, 780: true, 784: true, 788: true, 800: true, + 807: true, 818: true, 826: true, 834: true, 840: true, + 858: true, 860: true, 882: true, 886: true, 901: true, + 
927: true, 928: true, 929: true, 930: true, 931: true, + 932: true, 933: true, 934: true, 936: true, 938: true, + 940: true, 941: true, 943: true, 944: true, 946: true, + 947: true, 948: true, 949: true, 950: true, 951: true, + 952: true, 953: true, 955: true, 956: true, 957: true, + 958: true, 959: true, 960: true, 961: true, 962: true, + 963: true, 964: true, 965: true, 967: true, 968: true, + 969: true, 970: true, 971: true, 972: true, 973: true, + 975: true, 976: true, 977: true, 978: true, 979: true, + 980: true, 981: true, 984: true, 985: true, 986: true, + 990: true, 994: true, 997: true, 999: true, +} diff --git a/index/server/vendor/github.com/go-playground/validator/v10/doc.go b/index/server/vendor/github.com/go-playground/validator/v10/doc.go index a816c20a..7341c67d 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/doc.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/doc.go @@ -7,6 +7,14 @@ and has the ability to dive into arrays and maps of any type. see more examples https://github.com/go-playground/validator/tree/master/_examples +Singleton + +Validator is designed to be thread-safe and used as a singleton instance. +It caches information about your struct and validations, +in essence only parsing your validation tags once per struct type. +Using multiple instances neglects the benefit of caching. +The not thread-safe functions are explicitly marked as such in the documentation. + Validation Functions Return Type error Doing things this way is actually the way the standard library does, see the @@ -341,6 +349,40 @@ Example: // require the field if the Field1 and Field2 is not present: Usage: required_without_all=Field1 Field2 +Excluded If + +The field under validation must not be present or not empty only if all +the other specified fields are equal to the value following the specified +field. For strings ensures value is not "". For slices, maps, pointers, +interfaces, channels and functions ensures the value is not nil. + + Usage: excluded_if + +Examples: + + // exclude the field if the Field1 is equal to the parameter given: + Usage: excluded_if=Field1 foobar + + // exclude the field if the Field1 and Field2 is equal to the value respectively: + Usage: excluded_if=Field1 foo Field2 bar + +Excluded Unless + +The field under validation must not be present or empty unless all +the other specified fields are equal to the value following the specified +field. For strings ensures value is not "". For slices, maps, pointers, +interfaces, channels and functions ensures the value is not nil. + + Usage: excluded_unless + +Examples: + + // exclude the field unless the Field1 is equal to the parameter given: + Usage: excluded_unless=Field1 foobar + + // exclude the field unless the Field1 and Field2 is equal to the value respectively: + Usage: excluded_unless=Field1 foo Field2 bar + Is Default This validates that the value is the default value and is almost the @@ -726,6 +768,12 @@ This validates that a string value contains unicode alphanumeric characters only Usage: alphanumunicode +Boolean + +This validates that a string value can successfully be parsed into a boolean with strconv.ParseBool + + Usage: boolean + Number This validates that a string value contains number values only. 
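The `excluded_if`, `excluded_unless`, and `boolean` tags documented in the doc.go additions above are new in this vendored validator version. A minimal sketch of how they might be used, assuming the usual `github.com/go-playground/validator/v10` import path and a single package-level instance as the Singleton section recommends; the struct and field names are illustrative only:

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

// Package-level singleton, so struct metadata and tags are parsed only once.
var validate = validator.New()

type PaymentForm struct {
	// Method selects how the customer pays.
	Method string `validate:"required,oneof=card invoice"`
	// CardNumber must be empty whenever Method equals "invoice".
	CardNumber string `validate:"excluded_if=Method invoice"`
	// Recurring must parse with strconv.ParseBool ("true", "false", "1", "0", ...).
	Recurring string `validate:"boolean"`
}

func main() {
	form := PaymentForm{Method: "invoice", CardNumber: "4111111111111111", Recurring: "yes"}
	if err := validate.Struct(form); err != nil {
		// Prints one line per failing field, e.g. "CardNumber failed tag excluded_if".
		for _, fe := range err.(validator.ValidationErrors) {
			fmt.Println(fe.Field(), "failed tag", fe.Tag())
		}
	}
}
```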
@@ -811,6 +859,12 @@ This validates that a string value is valid JSON Usage: json +JWT String + +This validates that a string value is a valid JWT + + Usage: jwt + File path This validates that a string value contains a valid file path and that @@ -987,6 +1041,12 @@ This validates that a string value contains a valid version 5 UUID. Uppercase U Usage: uuid5 +Universally Unique Lexicographically Sortable Identifier ULID + +This validates that a string value contains a valid ULID value. + + Usage: ulid + ASCII This validates that a string value contains only ASCII characters. @@ -1221,6 +1281,27 @@ see: https://www.iso.org/iso-3166-country-codes.html Usage: iso3166_1_alpha3 +BCP 47 Language Tag + +This validates that a string value is a valid BCP 47 language tag, as parsed by language.Parse. +More information on https://pkg.go.dev/golang.org/x/text/language + + Usage: bcp47_language_tag + +BIC (SWIFT code) + +This validates that a string value is a valid Business Identifier Code (SWIFT code), defined in ISO 9362. +More information on https://www.iso.org/standard/60390.html + + Usage: bic + +RFC 1035 label + +This validates that a string value is a valid dns RFC 1035 label, defined in RFC 1035. +More information on https://datatracker.ietf.org/doc/html/rfc1035 + + Usage: dns_rfc1035_label + TimeZone This validates that a string value is a valid time zone based on the time zone database present on the system. @@ -1228,7 +1309,19 @@ Although empty value and Local value are allowed by time.LoadLocation golang fun More information on https://golang.org/pkg/time/#LoadLocation Usage: timezone - + +Semantic Version + +This validates that a string value is a valid semver version, defined in Semantic Versioning 2.0.0. +More information on https://semver.org/ + + Usage: semver + +Credit Card + +This validates that a string value contains a valid credit card number using Luhn algoritm. + + Usage: credit_card Alias Validators and Tags diff --git a/index/server/vendor/github.com/go-playground/validator/v10/errors.go b/index/server/vendor/github.com/go-playground/validator/v10/errors.go index 63293cf9..9a1b1abe 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/errors.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/errors.go @@ -82,7 +82,7 @@ func (ve ValidationErrors) Translate(ut ut.Translator) ValidationErrorsTranslati // FieldError contains all functions to get error details type FieldError interface { - // returns the validation tag that failed. if the + // Tag returns the validation tag that failed. if the // validation was an alias, this will return the // alias name and not the underlying tag that failed. // @@ -90,7 +90,7 @@ type FieldError interface { // will return "iscolor" Tag() string - // returns the validation tag that failed, even if an + // ActualTag returns the validation tag that failed, even if an // alias the actual tag within the alias will be returned. // If an 'or' validation fails the entire or will be returned. // @@ -98,7 +98,7 @@ type FieldError interface { // will return "hexcolor|rgb|rgba|hsl|hsla" ActualTag() string - // returns the namespace for the field error, with the tag + // Namespace returns the namespace for the field error, with the tag // name taking precedence over the field's actual name. // // eg. JSON name "User.fname" @@ -109,7 +109,7 @@ type FieldError interface { // using validate.Field(...) 
as there is no way to extract it's name Namespace() string - // returns the namespace for the field error, with the field's + // StructNamespace returns the namespace for the field error, with the field's // actual name. // // eq. "User.FirstName" see Namespace for comparison @@ -118,24 +118,24 @@ type FieldError interface { // using validate.Field(...) as there is no way to extract its name StructNamespace() string - // returns the fields name with the tag name taking precedence over the + // Field returns the fields name with the tag name taking precedence over the // field's actual name. // // eq. JSON name "fname" // see StructField for comparison Field() string - // returns the field's actual name from the struct, when able to determine. + // StructField returns the field's actual name from the struct, when able to determine. // // eq. "FirstName" // see Field for comparison StructField() string - // returns the actual field's value in case needed for creating the error + // Value returns the actual field's value in case needed for creating the error // message Value() interface{} - // returns the param value, in string form for comparison; this will also + // Param returns the param value, in string form for comparison; this will also // help with generating an error message Param() string @@ -146,10 +146,10 @@ type FieldError interface { // Type returns the Field's reflect Type // - // // eg. time.Time's type is time.Time + // eg. time.Time's type is time.Time Type() reflect.Type - // returns the FieldError's translated error + // Translate returns the FieldError's translated error // from the provided 'ut.Translator' and registered 'TranslationFunc' // // NOTE: if no registered translator can be found it returns the same as @@ -221,7 +221,7 @@ func (fe *fieldError) Field() string { // return fld } -// returns the field's actual name from the struct, when able to determine. +// StructField returns the field's actual name from the struct, when able to determine. func (fe *fieldError) StructField() string { // return fe.structField return fe.structNs[len(fe.structNs)-int(fe.structfieldLen):] diff --git a/index/server/vendor/github.com/go-playground/validator/v10/field_level.go b/index/server/vendor/github.com/go-playground/validator/v10/field_level.go index f0e2a9a8..ef35826e 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/field_level.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/field_level.go @@ -5,24 +5,25 @@ import "reflect" // FieldLevel contains all the information and helper functions // to validate a field type FieldLevel interface { - // returns the top level struct, if any + + // Top returns the top level struct, if any Top() reflect.Value - // returns the current fields parent struct, if any or + // Parent returns the current fields parent struct, if any or // the comparison value if called 'VarWithValue' Parent() reflect.Value - // returns current field for validation + // Field returns current field for validation Field() reflect.Value - // returns the field's name with the tag + // FieldName returns the field's name with the tag // name taking precedence over the fields actual name. 
FieldName() string - // returns the struct field's name + // StructFieldName returns the struct field's name StructFieldName() string - // returns param for validation against current field + // Param returns param for validation against current field Param() string // GetTag returns the current validations tag name @@ -33,7 +34,7 @@ type FieldLevel interface { // underlying value and it's kind. ExtractType(field reflect.Value) (value reflect.Value, kind reflect.Kind, nullable bool) - // traverses the parent struct to retrieve a specific field denoted by the provided namespace + // GetStructFieldOK traverses the parent struct to retrieve a specific field denoted by the provided namespace // in the param and returns the field, field kind and whether is was successful in retrieving // the field at all. // @@ -49,7 +50,7 @@ type FieldLevel interface { // Deprecated: Use GetStructFieldOKAdvanced2() instead which also return if the value is nullable. GetStructFieldOKAdvanced(val reflect.Value, namespace string) (reflect.Value, reflect.Kind, bool) - // traverses the parent struct to retrieve a specific field denoted by the provided namespace + // GetStructFieldOK2 traverses the parent struct to retrieve a specific field denoted by the provided namespace // in the param and returns the field, field kind, if it's a nullable type and whether is was successful in retrieving // the field at all. // @@ -57,7 +58,7 @@ type FieldLevel interface { // could not be retrieved because it didn't exist. GetStructFieldOK2() (reflect.Value, reflect.Kind, bool, bool) - // GetStructFieldOKAdvanced is the same as GetStructFieldOK except that it accepts the parent struct to start looking for + // GetStructFieldOKAdvanced2 is the same as GetStructFieldOK except that it accepts the parent struct to start looking for // the field and namespace allowing more extensibility for validators. GetStructFieldOKAdvanced2(val reflect.Value, namespace string) (reflect.Value, reflect.Kind, bool, bool) } @@ -107,12 +108,12 @@ func (v *validate) GetStructFieldOKAdvanced(val reflect.Value, namespace string) return current, kind, found } -// GetStructFieldOK returns Param returns param for validation against current field +// GetStructFieldOK2 returns Param returns param for validation against current field func (v *validate) GetStructFieldOK2() (reflect.Value, reflect.Kind, bool, bool) { return v.getStructFieldOKInternal(v.slflParent, v.ct.param) } -// GetStructFieldOKAdvanced is the same as GetStructFieldOK except that it accepts the parent struct to start looking for +// GetStructFieldOKAdvanced2 is the same as GetStructFieldOK except that it accepts the parent struct to start looking for // the field and namespace allowing more extensibility for validators. 
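The FieldLevel accessors renamed and documented in this hunk (Field, Param, GetStructFieldOK2, and friends) are what custom validation functions receive. A minimal sketch of a custom tag built on two of those accessors, again assuming the vendored `github.com/go-playground/validator/v10` import path; the tag name and struct are illustrative:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/go-playground/validator/v10"
)

func main() {
	validate := validator.New()

	// The custom function only needs FieldLevel.Field (the value under
	// validation) and FieldLevel.Param (the text after '=' in the tag).
	_ = validate.RegisterValidation("hasprefix", func(fl validator.FieldLevel) bool {
		return strings.HasPrefix(fl.Field().String(), fl.Param())
	})

	type Image struct {
		Ref string `validate:"hasprefix=quay.io/"`
	}

	err := validate.Struct(Image{Ref: "docker.io/library/alpine"})
	fmt.Println(err != nil) // true: Ref does not start with quay.io/
}
```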
func (v *validate) GetStructFieldOKAdvanced2(val reflect.Value, namespace string) (reflect.Value, reflect.Kind, bool, bool) { return v.getStructFieldOKInternal(val, namespace) diff --git a/index/server/vendor/github.com/go-playground/validator/v10/postcode_regexes.go b/index/server/vendor/github.com/go-playground/validator/v10/postcode_regexes.go new file mode 100644 index 00000000..e7e7b687 --- /dev/null +++ b/index/server/vendor/github.com/go-playground/validator/v10/postcode_regexes.go @@ -0,0 +1,173 @@ +package validator + +import "regexp" + +var postCodePatternDict = map[string]string{ + "GB": `^GIR[ ]?0AA|((AB|AL|B|BA|BB|BD|BH|BL|BN|BR|BS|BT|CA|CB|CF|CH|CM|CO|CR|CT|CV|CW|DA|DD|DE|DG|DH|DL|DN|DT|DY|E|EC|EH|EN|EX|FK|FY|G|GL|GY|GU|HA|HD|HG|HP|HR|HS|HU|HX|IG|IM|IP|IV|JE|KA|KT|KW|KY|L|LA|LD|LE|LL|LN|LS|LU|M|ME|MK|ML|N|NE|NG|NN|NP|NR|NW|OL|OX|PA|PE|PH|PL|PO|PR|RG|RH|RM|S|SA|SE|SG|SK|SL|SM|SN|SO|SP|SR|SS|ST|SW|SY|TA|TD|TF|TN|TQ|TR|TS|TW|UB|W|WA|WC|WD|WF|WN|WR|WS|WV|YO|ZE)(\d[\dA-Z]?[ ]?\d[ABD-HJLN-UW-Z]{2}))|BFPO[ ]?\d{1,4}$`, + "JE": `^JE\d[\dA-Z]?[ ]?\d[ABD-HJLN-UW-Z]{2}$`, + "GG": `^GY\d[\dA-Z]?[ ]?\d[ABD-HJLN-UW-Z]{2}$`, + "IM": `^IM\d[\dA-Z]?[ ]?\d[ABD-HJLN-UW-Z]{2}$`, + "US": `^\d{5}([ \-]\d{4})?$`, + "CA": `^[ABCEGHJKLMNPRSTVXY]\d[ABCEGHJ-NPRSTV-Z][ ]?\d[ABCEGHJ-NPRSTV-Z]\d$`, + "DE": `^\d{5}$`, + "JP": `^\d{3}-\d{4}$`, + "FR": `^\d{2}[ ]?\d{3}$`, + "AU": `^\d{4}$`, + "IT": `^\d{5}$`, + "CH": `^\d{4}$`, + "AT": `^\d{4}$`, + "ES": `^\d{5}$`, + "NL": `^\d{4}[ ]?[A-Z]{2}$`, + "BE": `^\d{4}$`, + "DK": `^\d{4}$`, + "SE": `^\d{3}[ ]?\d{2}$`, + "NO": `^\d{4}$`, + "BR": `^\d{5}[\-]?\d{3}$`, + "PT": `^\d{4}([\-]\d{3})?$`, + "FI": `^\d{5}$`, + "AX": `^22\d{3}$`, + "KR": `^\d{3}[\-]\d{3}$`, + "CN": `^\d{6}$`, + "TW": `^\d{3}(\d{2})?$`, + "SG": `^\d{6}$`, + "DZ": `^\d{5}$`, + "AD": `^AD\d{3}$`, + "AR": `^([A-HJ-NP-Z])?\d{4}([A-Z]{3})?$`, + "AM": `^(37)?\d{4}$`, + "AZ": `^\d{4}$`, + "BH": `^((1[0-2]|[2-9])\d{2})?$`, + "BD": `^\d{4}$`, + "BB": `^(BB\d{5})?$`, + "BY": `^\d{6}$`, + "BM": `^[A-Z]{2}[ ]?[A-Z0-9]{2}$`, + "BA": `^\d{5}$`, + "IO": `^BBND 1ZZ$`, + "BN": `^[A-Z]{2}[ ]?\d{4}$`, + "BG": `^\d{4}$`, + "KH": `^\d{5}$`, + "CV": `^\d{4}$`, + "CL": `^\d{7}$`, + "CR": `^\d{4,5}|\d{3}-\d{4}$`, + "HR": `^\d{5}$`, + "CY": `^\d{4}$`, + "CZ": `^\d{3}[ ]?\d{2}$`, + "DO": `^\d{5}$`, + "EC": `^([A-Z]\d{4}[A-Z]|(?:[A-Z]{2})?\d{6})?$`, + "EG": `^\d{5}$`, + "EE": `^\d{5}$`, + "FO": `^\d{3}$`, + "GE": `^\d{4}$`, + "GR": `^\d{3}[ ]?\d{2}$`, + "GL": `^39\d{2}$`, + "GT": `^\d{5}$`, + "HT": `^\d{4}$`, + "HN": `^(?:\d{5})?$`, + "HU": `^\d{4}$`, + "IS": `^\d{3}$`, + "IN": `^\d{6}$`, + "ID": `^\d{5}$`, + "IL": `^\d{5}$`, + "JO": `^\d{5}$`, + "KZ": `^\d{6}$`, + "KE": `^\d{5}$`, + "KW": `^\d{5}$`, + "LA": `^\d{5}$`, + "LV": `^\d{4}$`, + "LB": `^(\d{4}([ ]?\d{4})?)?$`, + "LI": `^(948[5-9])|(949[0-7])$`, + "LT": `^\d{5}$`, + "LU": `^\d{4}$`, + "MK": `^\d{4}$`, + "MY": `^\d{5}$`, + "MV": `^\d{5}$`, + "MT": `^[A-Z]{3}[ ]?\d{2,4}$`, + "MU": `^(\d{3}[A-Z]{2}\d{3})?$`, + "MX": `^\d{5}$`, + "MD": `^\d{4}$`, + "MC": `^980\d{2}$`, + "MA": `^\d{5}$`, + "NP": `^\d{5}$`, + "NZ": `^\d{4}$`, + "NI": `^((\d{4}-)?\d{3}-\d{3}(-\d{1})?)?$`, + "NG": `^(\d{6})?$`, + "OM": `^(PC )?\d{3}$`, + "PK": `^\d{5}$`, + "PY": `^\d{4}$`, + "PH": `^\d{4}$`, + "PL": `^\d{2}-\d{3}$`, + "PR": `^00[679]\d{2}([ \-]\d{4})?$`, + "RO": `^\d{6}$`, + "RU": `^\d{6}$`, + "SM": `^4789\d$`, + "SA": `^\d{5}$`, + "SN": `^\d{5}$`, + "SK": `^\d{3}[ ]?\d{2}$`, + "SI": `^\d{4}$`, + "ZA": `^\d{4}$`, + "LK": `^\d{5}$`, + "TJ": `^\d{6}$`, + "TH": 
`^\d{5}$`, + "TN": `^\d{4}$`, + "TR": `^\d{5}$`, + "TM": `^\d{6}$`, + "UA": `^\d{5}$`, + "UY": `^\d{5}$`, + "UZ": `^\d{6}$`, + "VA": `^00120$`, + "VE": `^\d{4}$`, + "ZM": `^\d{5}$`, + "AS": `^96799$`, + "CC": `^6799$`, + "CK": `^\d{4}$`, + "RS": `^\d{6}$`, + "ME": `^8\d{4}$`, + "CS": `^\d{5}$`, + "YU": `^\d{5}$`, + "CX": `^6798$`, + "ET": `^\d{4}$`, + "FK": `^FIQQ 1ZZ$`, + "NF": `^2899$`, + "FM": `^(9694[1-4])([ \-]\d{4})?$`, + "GF": `^9[78]3\d{2}$`, + "GN": `^\d{3}$`, + "GP": `^9[78][01]\d{2}$`, + "GS": `^SIQQ 1ZZ$`, + "GU": `^969[123]\d([ \-]\d{4})?$`, + "GW": `^\d{4}$`, + "HM": `^\d{4}$`, + "IQ": `^\d{5}$`, + "KG": `^\d{6}$`, + "LR": `^\d{4}$`, + "LS": `^\d{3}$`, + "MG": `^\d{3}$`, + "MH": `^969[67]\d([ \-]\d{4})?$`, + "MN": `^\d{6}$`, + "MP": `^9695[012]([ \-]\d{4})?$`, + "MQ": `^9[78]2\d{2}$`, + "NC": `^988\d{2}$`, + "NE": `^\d{4}$`, + "VI": `^008(([0-4]\d)|(5[01]))([ \-]\d{4})?$`, + "VN": `^[0-9]{1,6}$`, + "PF": `^987\d{2}$`, + "PG": `^\d{3}$`, + "PM": `^9[78]5\d{2}$`, + "PN": `^PCRN 1ZZ$`, + "PW": `^96940$`, + "RE": `^9[78]4\d{2}$`, + "SH": `^(ASCN|STHL) 1ZZ$`, + "SJ": `^\d{4}$`, + "SO": `^\d{5}$`, + "SZ": `^[HLMS]\d{3}$`, + "TC": `^TKCA 1ZZ$`, + "WF": `^986\d{2}$`, + "XK": `^\d{5}$`, + "YT": `^976\d{2}$`, +} + +var postCodeRegexDict = map[string]*regexp.Regexp{} + +func init() { + for countryCode, pattern := range postCodePatternDict { + postCodeRegexDict[countryCode] = regexp.MustCompile(pattern) + } +} diff --git a/index/server/vendor/github.com/go-playground/validator/v10/regexes.go b/index/server/vendor/github.com/go-playground/validator/v10/regexes.go index b741f4e1..9c1c6342 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/regexes.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/regexes.go @@ -10,7 +10,7 @@ const ( numericRegexString = "^[-+]?[0-9]+(?:\\.[0-9]+)?$" numberRegexString = "^[0-9]+$" hexadecimalRegexString = "^(0[xX])?[0-9a-fA-F]+$" - hexcolorRegexString = "^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{6})$" + hexColorRegexString = "^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$" rgbRegexString = "^rgb\\(\\s*(?:(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])|(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%)\\s*\\)$" rgbaRegexString = "^rgba\\(\\s*(?:(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])|(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%)\\s*,\\s*(?:(?:0.[1-9]*)|[01])\\s*\\)$" hslRegexString = "^hsl\\(\\s*(?:0|[1-9]\\d?|[12]\\d\\d|3[0-5]\\d|360)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*\\)$" @@ -29,6 +29,17 @@ const ( uUID4RFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" uUID5RFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-5[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" uUIDRFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$" + uLIDRegexString = "^[A-HJKMNP-TV-Z0-9]{26}$" + md4RegexString = "^[0-9a-f]{32}$" + md5RegexString = "^[0-9a-f]{32}$" + sha256RegexString = "^[0-9a-f]{64}$" + sha384RegexString = "^[0-9a-f]{96}$" + sha512RegexString = 
"^[0-9a-f]{128}$" + ripemd128RegexString = "^[0-9a-f]{32}$" + ripemd160RegexString = "^[0-9a-f]{40}$" + tiger128RegexString = "^[0-9a-f]{32}$" + tiger160RegexString = "^[0-9a-f]{40}$" + tiger192RegexString = "^[0-9a-f]{48}$" aSCIIRegexString = "^[\x00-\x7F]*$" printableASCIIRegexString = "^[\x20-\x7E]*$" multibyteRegexString = "[^\x00-\x7F]" @@ -36,19 +47,23 @@ const ( latitudeRegexString = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$" longitudeRegexString = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$" sSNRegexString = `^[0-9]{3}[ -]?(0[1-9]|[1-9][0-9])[ -]?([1-9][0-9]{3}|[0-9][1-9][0-9]{2}|[0-9]{2}[1-9][0-9]|[0-9]{3}[1-9])$` - hostnameRegexStringRFC952 = `^[a-zA-Z]([a-zA-Z0-9\-]+[\.]?)*[a-zA-Z0-9]$` // https://tools.ietf.org/html/rfc952 - hostnameRegexStringRFC1123 = `^([a-zA-Z0-9]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*?$` // accepts hostname starting with a digit https://tools.ietf.org/html/rfc1123 - fqdnRegexStringRFC1123 = `^([a-zA-Z0-9]{1}[a-zA-Z0-9_-]{0,62})(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*?(\.[a-zA-Z]{1}[a-zA-Z0-9]{0,62})\.?$` // same as hostnameRegexStringRFC1123 but must contain a non numerical TLD (possibly ending with '.') - btcAddressRegexString = `^[13][a-km-zA-HJ-NP-Z1-9]{25,34}$` // bitcoin address - btcAddressUpperRegexStringBech32 = `^BC1[02-9AC-HJ-NP-Z]{7,76}$` // bitcoin bech32 address https://en.bitcoin.it/wiki/Bech32 - btcAddressLowerRegexStringBech32 = `^bc1[02-9ac-hj-np-z]{7,76}$` // bitcoin bech32 address https://en.bitcoin.it/wiki/Bech32 + hostnameRegexStringRFC952 = `^[a-zA-Z]([a-zA-Z0-9\-]+[\.]?)*[a-zA-Z0-9]$` // https://tools.ietf.org/html/rfc952 + hostnameRegexStringRFC1123 = `^([a-zA-Z0-9]{1}[a-zA-Z0-9-]{0,62}){1}(\.[a-zA-Z0-9]{1}[a-zA-Z0-9-]{0,62})*?$` // accepts hostname starting with a digit https://tools.ietf.org/html/rfc1123 + fqdnRegexStringRFC1123 = `^([a-zA-Z0-9]{1}[a-zA-Z0-9-]{0,62})(\.[a-zA-Z0-9]{1}[a-zA-Z0-9-]{0,62})*?(\.[a-zA-Z]{1}[a-zA-Z0-9]{0,62})\.?$` // same as hostnameRegexStringRFC1123 but must contain a non numerical TLD (possibly ending with '.') + btcAddressRegexString = `^[13][a-km-zA-HJ-NP-Z1-9]{25,34}$` // bitcoin address + btcAddressUpperRegexStringBech32 = `^BC1[02-9AC-HJ-NP-Z]{7,76}$` // bitcoin bech32 address https://en.bitcoin.it/wiki/Bech32 + btcAddressLowerRegexStringBech32 = `^bc1[02-9ac-hj-np-z]{7,76}$` // bitcoin bech32 address https://en.bitcoin.it/wiki/Bech32 ethAddressRegexString = `^0x[0-9a-fA-F]{40}$` ethAddressUpperRegexString = `^0x[0-9A-F]{40}$` ethAddressLowerRegexString = `^0x[0-9a-f]{40}$` - uRLEncodedRegexString = `(%[A-Fa-f0-9]{2})` + uRLEncodedRegexString = `^(?:[^%]|%[0-9A-Fa-f]{2})*$` hTMLEncodedRegexString = `&#[x]?([0-9a-fA-F]{2})|(>)|(<)|(")|(&)+[;]?` hTMLRegexString = `<[/]?([a-zA-Z]+).*?>` + jWTRegexString = "^[A-Za-z0-9-_]+\\.[A-Za-z0-9-_]+\\.[A-Za-z0-9-_]*$" splitParamsRegexString = `'[^']*'|\S+` + bicRegexString = `^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$` + semverRegexString = `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$` // numbered capture groups https://semver.org/ + dnsRegexStringRFC1035Label = "^[a-z]([-a-z0-9]*[a-z0-9]){0,62}$" ) var ( @@ -59,7 +74,7 @@ var ( numericRegex = regexp.MustCompile(numericRegexString) numberRegex = regexp.MustCompile(numberRegexString) hexadecimalRegex = regexp.MustCompile(hexadecimalRegexString) - hexcolorRegex = regexp.MustCompile(hexcolorRegexString) + hexColorRegex = 
regexp.MustCompile(hexColorRegexString) rgbRegex = regexp.MustCompile(rgbRegexString) rgbaRegex = regexp.MustCompile(rgbaRegexString) hslRegex = regexp.MustCompile(hslRegexString) @@ -78,6 +93,17 @@ var ( uUID4RFC4122Regex = regexp.MustCompile(uUID4RFC4122RegexString) uUID5RFC4122Regex = regexp.MustCompile(uUID5RFC4122RegexString) uUIDRFC4122Regex = regexp.MustCompile(uUIDRFC4122RegexString) + uLIDRegex = regexp.MustCompile(uLIDRegexString) + md4Regex = regexp.MustCompile(md4RegexString) + md5Regex = regexp.MustCompile(md5RegexString) + sha256Regex = regexp.MustCompile(sha256RegexString) + sha384Regex = regexp.MustCompile(sha384RegexString) + sha512Regex = regexp.MustCompile(sha512RegexString) + ripemd128Regex = regexp.MustCompile(ripemd128RegexString) + ripemd160Regex = regexp.MustCompile(ripemd160RegexString) + tiger128Regex = regexp.MustCompile(tiger128RegexString) + tiger160Regex = regexp.MustCompile(tiger160RegexString) + tiger192Regex = regexp.MustCompile(tiger192RegexString) aSCIIRegex = regexp.MustCompile(aSCIIRegexString) printableASCIIRegex = regexp.MustCompile(printableASCIIRegexString) multibyteRegex = regexp.MustCompile(multibyteRegexString) @@ -92,10 +118,14 @@ var ( btcUpperAddressRegexBech32 = regexp.MustCompile(btcAddressUpperRegexStringBech32) btcLowerAddressRegexBech32 = regexp.MustCompile(btcAddressLowerRegexStringBech32) ethAddressRegex = regexp.MustCompile(ethAddressRegexString) - ethaddressRegexUpper = regexp.MustCompile(ethAddressUpperRegexString) + ethAddressRegexUpper = regexp.MustCompile(ethAddressUpperRegexString) ethAddressRegexLower = regexp.MustCompile(ethAddressLowerRegexString) uRLEncodedRegex = regexp.MustCompile(uRLEncodedRegexString) hTMLEncodedRegex = regexp.MustCompile(hTMLEncodedRegexString) hTMLRegex = regexp.MustCompile(hTMLRegexString) + jWTRegex = regexp.MustCompile(jWTRegexString) splitParamsRegex = regexp.MustCompile(splitParamsRegexString) + bicRegex = regexp.MustCompile(bicRegexString) + semverRegex = regexp.MustCompile(semverRegexString) + dnsRegexRFC1035Label = regexp.MustCompile(dnsRegexStringRFC1035Label) ) diff --git a/index/server/vendor/github.com/go-playground/validator/v10/struct_level.go b/index/server/vendor/github.com/go-playground/validator/v10/struct_level.go index 57691ee3..c0d89cfb 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/struct_level.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/struct_level.go @@ -23,18 +23,18 @@ func wrapStructLevelFunc(fn StructLevelFunc) StructLevelFuncCtx { // to validate a struct type StructLevel interface { - // returns the main validation object, in case one wants to call validations internally. + // Validator returns the main validation object, in case one wants to call validations internally. // this is so you don't have to use anonymous functions to get access to the validate // instance. Validator() *Validate - // returns the top level struct, if any + // Top returns the top level struct, if any Top() reflect.Value - // returns the current fields parent struct, if any + // Parent returns the current fields parent struct, if any Parent() reflect.Value - // returns the current struct. + // Current returns the current struct. Current() reflect.Value // ExtractType gets the actual underlying type of field value. @@ -42,7 +42,7 @@ type StructLevel interface { // underlying value and its kind. 
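The regular expressions added above (uLIDRegexString, jWTRegexString, bicRegexString, semverRegexString, dnsRegexStringRFC1035Label, and the compiled variables that follow) back the new string validators described earlier in doc.go. A minimal sketch of exercising a few of them on single values with Var, assuming the vendored `github.com/go-playground/validator/v10` import path; the sample values are illustrative:

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

func main() {
	validate := validator.New()

	// Each entry pairs a sample value with one of the newly documented tags.
	checks := []struct{ value, tag string }{
		{"1.2.3-rc.1+build.5", "semver"},
		{"01ARZ3NDEKTSV4RRFFQ69G5FAV", "ulid"},
		{"DEUTDEFF500", "bic"},
		{"registry-viewer", "dns_rfc1035_label"},
		{"en-US", "bcp47_language_tag"},
	}

	for _, c := range checks {
		if err := validate.Var(c.value, c.tag); err != nil {
			fmt.Printf("%q failed %s: %v\n", c.value, c.tag, err)
		} else {
			fmt.Printf("%q satisfies %s\n", c.value, c.tag)
		}
	}
}
```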
ExtractType(field reflect.Value) (value reflect.Value, kind reflect.Kind, nullable bool) - // reports an error just by passing the field and tag information + // ReportError reports an error just by passing the field and tag information // // NOTES: // @@ -54,7 +54,7 @@ type StructLevel interface { // and process on the flip side it's up to you. ReportError(field interface{}, fieldName, structFieldName string, tag, param string) - // reports an error just by passing ValidationErrors + // ReportValidationErrors reports an error just by passing ValidationErrors // // NOTES: // diff --git a/index/server/vendor/github.com/go-playground/validator/v10/util.go b/index/server/vendor/github.com/go-playground/validator/v10/util.go index 56420f43..36da8551 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/util.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/util.go @@ -82,7 +82,7 @@ BEGIN: fld := namespace var ns string - if typ != timeType { + if !typ.ConvertibleTo(timeType) { idx := strings.Index(namespace, namespaceSeparator) diff --git a/index/server/vendor/github.com/go-playground/validator/v10/validator.go b/index/server/vendor/github.com/go-playground/validator/v10/validator.go index f097f394..80da095a 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/validator.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/validator.go @@ -74,7 +74,7 @@ func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, cur } } - v.traverseField(ctx, parent, current.Field(f.idx), ns, structNs, f, f.cTags) + v.traverseField(ctx, current, current.Field(f.idx), ns, structNs, f, f.cTags) } } @@ -164,7 +164,7 @@ func (v *validate) traverseField(ctx context.Context, parent reflect.Value, curr typ = current.Type() - if typ != timeType { + if !typ.ConvertibleTo(timeType) { if ct != nil { @@ -222,12 +222,12 @@ func (v *validate) traverseField(ctx context.Context, parent reflect.Value, curr structNs = append(append(structNs, cf.name...), '.') } - v.validateStruct(ctx, current, current, typ, ns, structNs, ct) + v.validateStruct(ctx, parent, current, typ, ns, structNs, ct) return } } - if !ct.hasTag { + if ct == nil || !ct.hasTag { return } @@ -355,6 +355,10 @@ OUTER: v.ct = ct if ct.fn(ctx, v) { + if ct.isBlockEnd { + ct = ct.next + continue OUTER + } // drain rest of the 'or' values, then continue or leave for { @@ -368,6 +372,11 @@ OUTER: if ct.typeof != typeOr { continue OUTER } + + if ct.isBlockEnd { + ct = ct.next + continue OUTER + } } } diff --git a/index/server/vendor/github.com/go-playground/validator/v10/validator_instance.go b/index/server/vendor/github.com/go-playground/validator/v10/validator_instance.go index fe6a4877..9493da49 100644 --- a/index/server/vendor/github.com/go-playground/validator/v10/validator_instance.go +++ b/index/server/vendor/github.com/go-playground/validator/v10/validator_instance.go @@ -29,6 +29,12 @@ const ( requiredWithAllTag = "required_with_all" requiredIfTag = "required_if" requiredUnlessTag = "required_unless" + excludedWithoutAllTag = "excluded_without_all" + excludedWithoutTag = "excluded_without" + excludedWithTag = "excluded_with" + excludedWithAllTag = "excluded_with_all" + excludedIfTag = "excluded_if" + excludedUnlessTag = "excluded_unless" skipValidationTag = "-" diveTag = "dive" keysTag = "keys" @@ -80,11 +86,16 @@ type Validate struct { aliases map[string]string validations 
map[string]internalValidationFuncWrapper transTagFunc map[ut.Translator]map[string]TranslationFunc // map[]map[]TranslationFunc + rules map[reflect.Type]map[string]string tagCache *tagCache structCache *structCache } // New returns a new instance of 'validate' with sane defaults. +// Validate is designed to be thread-safe and used as a singleton instance. +// It caches information about your struct and validations, +// in essence only parsing your validation tags once per struct type. +// Using multiple instances neglects the benefit of caching. func New() *Validate { tc := new(tagCache) @@ -111,7 +122,8 @@ func New() *Validate { switch k { // these require that even if the value is nil that the validation should run, omitempty still overrides this behaviour - case requiredIfTag, requiredUnlessTag, requiredWithTag, requiredWithAllTag, requiredWithoutTag, requiredWithoutAllTag: + case requiredIfTag, requiredUnlessTag, requiredWithTag, requiredWithAllTag, requiredWithoutTag, requiredWithoutAllTag, + excludedIfTag, excludedUnlessTag, excludedWithTag, excludedWithAllTag, excludedWithoutTag, excludedWithoutAllTag: _ = v.registerValidation(k, wrapFunc(val), true, true) default: // no need to error check here, baked in will always be valid @@ -138,12 +150,49 @@ func (v *Validate) SetTagName(name string) { v.tagName = name } +// ValidateMapCtx validates a map using a map of validation rules and allows passing of contextual +// validation validation information via context.Context. +func (v Validate) ValidateMapCtx(ctx context.Context, data map[string]interface{}, rules map[string]interface{}) map[string]interface{} { + errs := make(map[string]interface{}) + for field, rule := range rules { + if ruleObj, ok := rule.(map[string]interface{}); ok { + if dataObj, ok := data[field].(map[string]interface{}); ok { + err := v.ValidateMapCtx(ctx, dataObj, ruleObj) + if len(err) > 0 { + errs[field] = err + } + } else if dataObjs, ok := data[field].([]map[string]interface{}); ok { + for _, obj := range dataObjs { + err := v.ValidateMapCtx(ctx, obj, ruleObj) + if len(err) > 0 { + errs[field] = err + } + } + } else { + errs[field] = errors.New("The field: '" + field + "' is not a map to dive") + } + } else if ruleStr, ok := rule.(string); ok { + err := v.VarCtx(ctx, data[field], ruleStr) + if err != nil { + errs[field] = err + } + } + } + return errs +} + +// ValidateMap validates map data from a map of tags +func (v *Validate) ValidateMap(data map[string]interface{}, rules map[string]interface{}) map[string]interface{} { + return v.ValidateMapCtx(context.Background(), data, rules) +} + // RegisterTagNameFunc registers a function to get alternate names for StructFields. // // eg. 
to use the names which have been specified for JSON representations of structs, rather than normal Go field names: // // validate.RegisterTagNameFunc(func(fld reflect.StructField) string { // name := strings.SplitN(fld.Tag.Get("json"), ",", 2)[0] +// // skip if tag key says it should be ignored // if name == "-" { // return "" // } @@ -175,11 +224,11 @@ func (v *Validate) RegisterValidationCtx(tag string, fn FuncCtx, callValidationE func (v *Validate) registerValidation(tag string, fn FuncCtx, bakedIn bool, nilCheckable bool) error { if len(tag) == 0 { - return errors.New("Function Key cannot be empty") + return errors.New("function Key cannot be empty") } if fn == nil { - return errors.New("Function cannot be empty") + return errors.New("function cannot be empty") } _, ok := restrictedTags[tag] @@ -235,6 +284,34 @@ func (v *Validate) RegisterStructValidationCtx(fn StructLevelFuncCtx, types ...i } } +// RegisterStructValidationMapRules registers validate map rules. +// Be aware that map validation rules supersede those defined on a/the struct if present. +// +// NOTE: this method is not thread-safe it is intended that these all be registered prior to any validation +func (v *Validate) RegisterStructValidationMapRules(rules map[string]string, types ...interface{}) { + if v.rules == nil { + v.rules = make(map[reflect.Type]map[string]string) + } + + deepCopyRules := make(map[string]string) + for i, rule := range rules { + deepCopyRules[i] = rule + } + + for _, t := range types { + typ := reflect.TypeOf(t) + + if typ.Kind() == reflect.Ptr { + typ = typ.Elem() + } + + if typ.Kind() != reflect.Struct { + continue + } + v.rules[typ] = deepCopyRules + } +} + // RegisterCustomTypeFunc registers a CustomTypeFunc against a number of types // // NOTE: this method is not thread-safe it is intended that these all be registered prior to any validation @@ -295,7 +372,7 @@ func (v *Validate) StructCtx(ctx context.Context, s interface{}) (err error) { val = val.Elem() } - if val.Kind() != reflect.Struct || val.Type() == timeType { + if val.Kind() != reflect.Struct || val.Type().ConvertibleTo(timeType) { return &InvalidValidationError{Type: reflect.TypeOf(s)} } @@ -340,7 +417,7 @@ func (v *Validate) StructFilteredCtx(ctx context.Context, s interface{}, fn Filt val = val.Elem() } - if val.Kind() != reflect.Struct || val.Type() == timeType { + if val.Kind() != reflect.Struct || val.Type().ConvertibleTo(timeType) { return &InvalidValidationError{Type: reflect.TypeOf(s)} } @@ -388,7 +465,7 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields . val = val.Elem() } - if val.Kind() != reflect.Struct || val.Type() == timeType { + if val.Kind() != reflect.Struct || val.Type().ConvertibleTo(timeType) { return &InvalidValidationError{Type: reflect.TypeOf(s)} } @@ -409,7 +486,10 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields . if len(flds) > 0 { vd.misc = append(vd.misc[0:0], name...) - vd.misc = append(vd.misc, '.') + // Don't append empty name for unnamed structs + if len(vd.misc) != 0 { + vd.misc = append(vd.misc, '.') + } for _, s := range flds { @@ -475,7 +555,7 @@ func (v *Validate) StructExceptCtx(ctx context.Context, s interface{}, fields .. 
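ValidateMap/ValidateMapCtx and RegisterStructValidationMapRules, both added in this file, let validation rules be supplied as data rather than struct tags. A minimal sketch of both, assuming the vendored `github.com/go-playground/validator/v10` import path and that registered map rules are applied during Struct validation as in the upstream validator; the types and rules are illustrative:

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

// Registry deliberately carries no validate tags; its rules are attached at runtime.
type Registry struct {
	Name string
	URL  string
}

func main() {
	validate := validator.New()

	// Rules registered this way supersede any rules defined on the struct itself.
	validate.RegisterStructValidationMapRules(map[string]string{
		"Name": "required",
		"URL":  "required,url",
	}, Registry{})
	fmt.Println(validate.Struct(Registry{Name: "staging", URL: "not a url"}) != nil) // true

	// ValidateMap checks loosely structured data against a parallel rules map.
	data := map[string]interface{}{
		"name":  "java-maven",
		"owner": map[string]interface{}{"email": "not-an-email"},
	}
	rules := map[string]interface{}{
		"name":  "required,lowercase",
		"owner": map[string]interface{}{"email": "required,email"},
	}
	errs := validate.ValidateMap(data, rules)
	fmt.Println(len(errs)) // 1: only owner.email fails
}
```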
val = val.Elem() } - if val.Kind() != reflect.Struct || val.Type() == timeType { + if val.Kind() != reflect.Struct || val.Type().ConvertibleTo(timeType) { return &InvalidValidationError{Type: reflect.TypeOf(s)} } diff --git a/index/server/vendor/github.com/goccy/go-json/.codecov.yml b/index/server/vendor/github.com/goccy/go-json/.codecov.yml new file mode 100644 index 00000000..e9813457 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/.codecov.yml @@ -0,0 +1,32 @@ +codecov: + require_ci_to_pass: yes + +coverage: + precision: 2 + round: down + range: "70...100" + + status: + project: + default: + target: 70% + threshold: 2% + patch: off + changes: no + +parsers: + gcov: + branch_detection: + conditional: yes + loop: yes + method: no + macro: no + +comment: + layout: "header,diff" + behavior: default + require_changes: no + +ignore: + - internal/encoder/vm_color + - internal/encoder/vm_color_indent diff --git a/index/server/vendor/github.com/goccy/go-json/.gitignore b/index/server/vendor/github.com/goccy/go-json/.gitignore new file mode 100644 index 00000000..37828382 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/.gitignore @@ -0,0 +1,2 @@ +cover.html +cover.out diff --git a/index/server/vendor/github.com/goccy/go-json/.golangci.yml b/index/server/vendor/github.com/goccy/go-json/.golangci.yml new file mode 100644 index 00000000..57ae5a52 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/.golangci.yml @@ -0,0 +1,83 @@ +run: + skip-files: + - encode_optype.go + - ".*_test\\.go$" + +linters-settings: + govet: + enable-all: true + disable: + - shadow + +linters: + enable-all: true + disable: + - dogsled + - dupl + - exhaustive + - exhaustivestruct + - errorlint + - forbidigo + - funlen + - gci + - gochecknoglobals + - gochecknoinits + - gocognit + - gocritic + - gocyclo + - godot + - godox + - goerr113 + - gofumpt + - gomnd + - gosec + - ifshort + - lll + - makezero + - nakedret + - nestif + - nlreturn + - paralleltest + - testpackage + - thelper + - wrapcheck + - interfacer + - lll + - nakedret + - nestif + - nlreturn + - testpackage + - wsl + - varnamelen + - nilnil + - ireturn + - govet + - forcetypeassert + - cyclop + - containedctx + - revive + +issues: + exclude-rules: + # not needed + - path: /*.go + text: "ST1003: should not use underscores in package names" + linters: + - stylecheck + - path: /*.go + text: "don't use an underscore in package name" + linters: + - golint + - path: rtype.go + linters: + - golint + - stylecheck + - path: error.go + linters: + - staticcheck + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. 
+ max-same-issues: 0 diff --git a/index/server/vendor/github.com/goccy/go-json/CHANGELOG.md b/index/server/vendor/github.com/goccy/go-json/CHANGELOG.md new file mode 100644 index 00000000..d63009fd --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/CHANGELOG.md @@ -0,0 +1,393 @@ +# v0.9.11 - 2022/08/18 + +### Fix bugs + +* Fix unexpected behavior when buffer ends with backslash ( #383 ) +* Fix stream decoding of escaped character ( #387 ) + +# v0.9.10 - 2022/07/15 + +### Fix bugs + +* Fix boundary exception of type caching ( #382 ) + +# v0.9.9 - 2022/07/15 + +### Fix bugs + +* Fix encoding of directed interface with typed nil ( #377 ) +* Fix embedded primitive type encoding using alias ( #378 ) +* Fix slice/array type encoding with types implementing MarshalJSON ( #379 ) +* Fix unicode decoding when the expected buffer state is not met after reading ( #380 ) + +# v0.9.8 - 2022/06/30 + +### Fix bugs + +* Fix decoding of surrogate-pair ( #365 ) +* Fix handling of embedded primitive type ( #366 ) +* Add validation of escape sequence for decoder ( #367 ) +* Fix stream tokenizing respecting UseNumber ( #369 ) +* Fix encoding when struct pointer type that implements Marshal JSON is embedded ( #375 ) + +### Improve performance + +* Improve performance of linkRecursiveCode ( #368 ) + +# v0.9.7 - 2022/04/22 + +### Fix bugs + +#### Encoder + +* Add filtering process for encoding on slow path ( #355 ) +* Fix encoding of interface{} with pointer type ( #363 ) + +#### Decoder + +* Fix map key decoder that implements UnmarshalJSON ( #353 ) +* Fix decoding of []uint8 type ( #361 ) + +### New features + +* Add DebugWith option for encoder ( #356 ) + +# v0.9.6 - 2022/03/22 + +### Fix bugs + +* Correct the handling of the minimum value of int type for decoder ( #344 ) +* Fix bugs of stream decoder's bufferSize ( #349 ) +* Add a guard to use typeptr more safely ( #351 ) + +### Improve decoder performance + +* Improve escapeString's performance ( #345 ) + +### Others + +* Update go version for CI ( #347 ) + +# v0.9.5 - 2022/03/04 + +### Fix bugs + +* Fix panic when decoding time.Time with context ( #328 ) +* Fix reading the next character in buffer to nul consideration ( #338 ) +* Fix incorrect handling on skipValue ( #341 ) + +### Improve decoder performance + +* Improve performance when a payload contains escape sequence ( #334 ) + +# v0.9.4 - 2022/01/21 + +* Fix IsNilForMarshaler for string type with omitempty ( #323 ) +* Fix the case where the embedded field is at the end ( #326 ) + +# v0.9.3 - 2022/01/14 + +* Fix logic of removing struct field for decoder ( #322 ) + +# v0.9.2 - 2022/01/14 + +* Add invalid decoder to delay type error judgment at decode ( #321 ) + +# v0.9.1 - 2022/01/11 + +* Fix encoding of MarshalText/MarshalJSON operation with head offset ( #319 ) + +# v0.9.0 - 2022/01/05 + +### New feature + +* Supports dynamic filtering of struct fields ( #314 ) + +### Improve encoding performance + +* Improve map encoding performance ( #310 ) +* Optimize encoding path for escaped string ( #311 ) +* Add encoding option for performance ( #312 ) + +### Fix bugs + +* Fix panic at encoding map value on 1.18 ( #310 ) +* Fix MarshalIndent for interface type ( #317 ) + +# v0.8.1 - 2021/12/05 + +* Fix operation conversion from PtrHead to Head in Recursive type ( #305 ) + +# v0.8.0 - 2021/12/02 + +* Fix embedded field conflict behavior ( #300 ) +* Refactor compiler for encoder ( #301 #302 ) + +# v0.7.10 - 2021/10/16 + +* Fix conversion from pointer to uint64 ( #294 ) + +# 
v0.7.9 - 2021/09/28 + +* Fix encoding of nil value about interface type that has method ( #291 ) + +# v0.7.8 - 2021/09/01 + +* Fix mapassign_faststr for indirect struct type ( #283 ) +* Fix encoding of not empty interface type ( #284 ) +* Fix encoding of empty struct interface type ( #286 ) + +# v0.7.7 - 2021/08/25 + +* Fix invalid utf8 on stream decoder ( #279 ) +* Fix buffer length bug on string stream decoder ( #280 ) + +Thank you @orisano !! + +# v0.7.6 - 2021/08/13 + +* Fix nil slice assignment ( #276 ) +* Improve error message ( #277 ) + +# v0.7.5 - 2021/08/12 + +* Fix encoding of embedded struct with tags ( #265 ) +* Fix encoding of embedded struct that isn't first field ( #272 ) +* Fix decoding of binary type with escaped char ( #273 ) + +# v0.7.4 - 2021/07/06 + +* Fix encoding of indirect layout structure ( #264 ) + +# v0.7.3 - 2021/06/29 + +* Fix encoding of pointer type in empty interface ( #262 ) + +# v0.7.2 - 2021/06/26 + +### Fix decoder + +* Add decoder for func type to fix decoding of nil function value ( #257 ) +* Fix stream decoding of []byte type ( #258 ) + +### Performance + +* Improve decoding performance of map[string]interface{} type ( use `mapassign_faststr` ) ( #256 ) +* Improve encoding performance of empty interface type ( remove recursive calling of `vm.Run` ) ( #259 ) + +### Benchmark + +* Add bytedance/sonic as benchmark target ( #254 ) + +# v0.7.1 - 2021/06/18 + +### Fix decoder + +* Fix error when unmarshal empty array ( #253 ) + +# v0.7.0 - 2021/06/12 + +### Support context for MarshalJSON and UnmarshalJSON ( #248 ) + +* json.MarshalContext(context.Context, interface{}, ...json.EncodeOption) ([]byte, error) +* json.NewEncoder(io.Writer).EncodeContext(context.Context, interface{}, ...json.EncodeOption) error +* json.UnmarshalContext(context.Context, []byte, interface{}, ...json.DecodeOption) error +* json.NewDecoder(io.Reader).DecodeContext(context.Context, interface{}) error + +```go +type MarshalerContext interface { + MarshalJSON(context.Context) ([]byte, error) +} + +type UnmarshalerContext interface { + UnmarshalJSON(context.Context, []byte) error +} +``` + +### Add DecodeFieldPriorityFirstWin option ( #242 ) + +In the default behavior, go-json, like encoding/json, will reflect the result of the last evaluation when a field with the same name exists. I've added new options to allow you to change this behavior. `json.DecodeFieldPriorityFirstWin` option reflects the result of the first evaluation if a field with the same name exists. This behavior has a performance advantage as it allows the subsequent strings to be skipped if all fields have been evaluated. + +### Fix encoder + +* Fix indent number contains recursive type ( #249 ) +* Fix encoding of using empty interface as map key ( #244 ) + +### Fix decoder + +* Fix decoding fields containing escaped characters ( #237 ) + +### Refactor + +* Move some tests to subdirectory ( #243 ) +* Refactor package layout for decoder ( #238 ) + +# v0.6.1 - 2021/06/02 + +### Fix encoder + +* Fix value of totalLength for encoding ( #236 ) + +# v0.6.0 - 2021/06/01 + +### Support Colorize option for encoding (#233) + +```go +b, err := json.MarshalWithOption(v, json.Colorize(json.DefaultColorScheme)) +if err != nil { + ... 
+} +fmt.Println(string(b)) // print colored json +``` + +### Refactor + +* Fix opcode layout - Adjust memory layout of the opcode to 128 bytes in a 64-bit environment ( #230 ) +* Refactor encode option ( #231 ) +* Refactor escape string ( #232 ) + +# v0.5.1 - 2021/5/20 + +### Optimization + +* Add type addrShift to enable bigger encoder/decoder cache ( #213 ) + +### Fix decoder + +* Keep original reference of slice element ( #229 ) + +### Refactor + +* Refactor Debug mode for encoding ( #226 ) +* Generate VM sources for encoding ( #227 ) +* Refactor validator for null/true/false for decoding ( #221 ) + +# v0.5.0 - 2021/5/9 + +### Supports using omitempty and string tags at the same time ( #216 ) + +### Fix decoder + +* Fix stream decoder for unicode char ( #215 ) +* Fix decoding of slice element ( #219 ) +* Fix calculating of buffer length for stream decoder ( #220 ) + +### Refactor + +* replace skipWhiteSpace goto by loop ( #212 ) + +# v0.4.14 - 2021/5/4 + +### Benchmark + +* Add valyala/fastjson to benchmark ( #193 ) +* Add benchmark task for CI ( #211 ) + +### Fix decoder + +* Fix decoding of slice with unmarshal json type ( #198 ) +* Fix decoding of null value for interface type that does not implement Unmarshaler ( #205 ) +* Fix decoding of null value to []byte by json.Unmarshal ( #206 ) +* Fix decoding of backslash char at the end of string ( #207 ) +* Fix stream decoder for null/true/false value ( #208 ) +* Fix stream decoder for slow reader ( #211 ) + +### Performance + +* If cap of slice is enough, reuse slice data for compatibility with encoding/json ( #200 ) + +# v0.4.13 - 2021/4/20 + +### Fix json.Compact and json.Indent + +* Support validation the input buffer for json.Compact and json.Indent ( #189 ) +* Optimize json.Compact and json.Indent ( improve memory footprint ) ( #190 ) + +# v0.4.12 - 2021/4/15 + +### Fix encoder + +* Fix unnecessary indent for empty slice type ( #181 ) +* Fix encoding of omitempty feature for the slice or interface type ( #183 ) +* Fix encoding custom types zero values with omitempty when marshaller exists ( #187 ) + +### Fix decoder + +* Fix decoder for invalid top level value ( #184 ) +* Fix decoder for invalid number value ( #185 ) + +# v0.4.11 - 2021/4/3 + +* Improve decoder performance for interface type + +# v0.4.10 - 2021/4/2 + +### Fix encoder + +* Fixed a bug when encoding slice and map containing recursive structures +* Fixed a logic to determine if indirect reference + +# v0.4.9 - 2021/3/29 + +### Add debug mode + +If you use `json.MarshalWithOption(v, json.Debug())` and `panic` occurred in `go-json`, produces debug information to console. + +### Support a new feature to compatible with encoding/json + +- invalid UTF-8 is coerced to valid UTF-8 ( without performance down ) + +### Fix encoder + +- Fixed handling of MarshalJSON of function type + +### Fix decoding of slice of pointer type + +If there is a pointer value, go-json will use it. (This behavior is necessary to achieve the ability to prioritize pre-filled values). However, since slices are reused internally, there was a bug that referred to the previous pointer value. Therefore, it is not necessary to refer to the pointer value in advance for the slice element, so we explicitly initialize slice element by `nil`. + +# v0.4.8 - 2021/3/21 + +### Reduce memory usage at compile time + +* go-json have used about 2GB of memory at compile time, but now it can compile with about less than 550MB. 
+ +### Fix any encoder's bug + +* Add many test cases for encoder +* Fix composite type ( slice/array/map ) +* Fix pointer types +* Fix encoding of MarshalJSON or MarshalText or json.Number type + +### Refactor encoder + +* Change package layout for reducing memory usage at compile +* Remove anonymous and only operation +* Remove root property from encodeCompileContext and opcode + +### Fix CI + +* Add Go 1.16 +* Remove Go 1.13 +* Fix `make cover` task + +### Number/Delim/Token/RawMessage use the types defined in encoding/json by type alias + +# v0.4.7 - 2021/02/22 + +### Fix decoder + +* Fix decoding of deep recursive structure +* Fix decoding of embedded unexported pointer field +* Fix invalid test case +* Fix decoding of invalid value +* Fix decoding of prefilled value +* Fix not being able to return UnmarshalTypeError when it should be returned +* Fix decoding of null value +* Fix decoding of type of null string +* Use pre allocated pointer if exists it at decoding + +### Reduce memory usage at compile + +* Integrate int/int8/int16/int32/int64 and uint/uint8/uint16/uint32/uint64 operation to reduce memory usage at compile + +### Remove unnecessary optype diff --git a/index/server/vendor/github.com/goccy/go-json/LICENSE b/index/server/vendor/github.com/goccy/go-json/LICENSE new file mode 100644 index 00000000..6449c8bf --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Masaaki Goshima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/index/server/vendor/github.com/goccy/go-json/Makefile b/index/server/vendor/github.com/goccy/go-json/Makefile new file mode 100644 index 00000000..363563ab --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/Makefile @@ -0,0 +1,39 @@ +PKG := github.com/goccy/go-json + +BIN_DIR := $(CURDIR)/bin +PKGS := $(shell go list ./... | grep -v internal/cmd|grep -v test) +COVER_PKGS := $(foreach pkg,$(PKGS),$(subst $(PKG),.,$(pkg))) + +COMMA := , +EMPTY := +SPACE := $(EMPTY) $(EMPTY) +COVERPKG_OPT := $(subst $(SPACE),$(COMMA),$(COVER_PKGS)) + +$(BIN_DIR): + @mkdir -p $(BIN_DIR) + +.PHONY: cover +cover: + go test -coverpkg=$(COVERPKG_OPT) -coverprofile=cover.out ./... 
+ +.PHONY: cover-html +cover-html: cover + go tool cover -html=cover.out + +.PHONY: lint +lint: golangci-lint + golangci-lint run + +golangci-lint: | $(BIN_DIR) + @{ \ + set -e; \ + GOLANGCI_LINT_TMP_DIR=$$(mktemp -d); \ + cd $$GOLANGCI_LINT_TMP_DIR; \ + go mod init tmp; \ + GOBIN=$(BIN_DIR) go get github.com/golangci/golangci-lint/cmd/golangci-lint@v1.36.0; \ + rm -rf $$GOLANGCI_LINT_TMP_DIR; \ + } + +.PHONY: generate +generate: + go generate ./internal/... diff --git a/index/server/vendor/github.com/goccy/go-json/README.md b/index/server/vendor/github.com/goccy/go-json/README.md new file mode 100644 index 00000000..56862377 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/README.md @@ -0,0 +1,529 @@ +# go-json + +![Go](https://github.com/goccy/go-json/workflows/Go/badge.svg) +[![GoDoc](https://godoc.org/github.com/goccy/go-json?status.svg)](https://pkg.go.dev/github.com/goccy/go-json?tab=doc) +[![codecov](https://codecov.io/gh/goccy/go-json/branch/master/graph/badge.svg)](https://codecov.io/gh/goccy/go-json) + +Fast JSON encoder/decoder compatible with encoding/json for Go + + + +# Roadmap + +``` +* version ( expected release date ) + +* v0.9.0 + | + | while maintaining compatibility with encoding/json, we will add convenient APIs + | + v +* v1.0.0 +``` + +We are accepting requests for features that will be implemented between v0.9.0 and v.1.0.0. +If you have the API you need, please submit your issue [here](https://github.com/goccy/go-json/issues). + +# Features + +- Drop-in replacement of `encoding/json` +- Fast ( See [Benchmark section](https://github.com/goccy/go-json#benchmarks) ) +- Flexible customization with options +- Coloring the encoded string +- Can propagate context.Context to `MarshalJSON` or `UnmarshalJSON` +- Can dynamically filter the fields of the structure type-safely + +# Installation + +``` +go get github.com/goccy/go-json +``` + +# How to use + +Replace import statement from `encoding/json` to `github.com/goccy/go-json` + +``` +-import "encoding/json" ++import "github.com/goccy/go-json" +``` + +# JSON library comparison + +| name | encoder | decoder | compatible with `encoding/json` | +| :----: | :------: | :-----: | :-----------------------------: | +| encoding/json | yes | yes | N/A | +| [json-iterator/go](https://github.com/json-iterator/go) | yes | yes | partial | +| [easyjson](https://github.com/mailru/easyjson) | yes | yes | no | +| [gojay](https://github.com/francoispqt/gojay) | yes | yes | no | +| [segmentio/encoding/json](https://github.com/segmentio/encoding/tree/master/json) | yes | yes | partial | +| [jettison](https://github.com/wI2L/jettison) | yes | no | no | +| [simdjson-go](https://github.com/minio/simdjson-go) | no | yes | no | +| goccy/go-json | yes | yes | yes | + +- `json-iterator/go` isn't compatible with `encoding/json` in many ways (e.g. https://github.com/json-iterator/go/issues/229 ), but it hasn't been supported for a long time. +- `segmentio/encoding/json` is well supported for encoders, but some are not supported for decoder APIs such as `Token` ( streaming decode ) + +## Other libraries + +- [jingo](https://github.com/bet365/jingo) + +I tried the benchmark but it didn't work. +Also, it seems to panic when it receives an unexpected value because there is no error handling... 
+
+- [ffjson](https://github.com/pquerna/ffjson)
+
+Benchmarking gave very slow results.
+It appears to assume that the user manages the buffer pool properly.
+Also, development seems to have stopped.
+
+# Benchmarks
+
+```
+$ cd benchmarks
+$ go test -bench .
+```
+
+## Encode
+
+## Decode
+
+# Fuzzing
+
+[go-json-fuzz](https://github.com/goccy/go-json-fuzz) is the repository for fuzzing tests.
+If you run the tests in this repository and find a bug, please commit the corpus to go-json-fuzz and report the issue to [go-json](https://github.com/goccy/go-json/issues).
+
+# How it works
+
+`go-json` is very fast in both encoding and decoding compared to other libraries.
+It would be easier to gain performance by relying on automatic code generation or on a dedicated interface, but `go-json` deliberately sticks to compatibility with `encoding/json` and its simple interface. Despite this, we are developing it with the aim of being the fastest library.
+
+Here, we explain the various speed-up techniques implemented by `go-json`.
+
+## Basic technique
+
+The techniques listed here are the ones used by most of the libraries listed above.
+
+### Buffer reuse
+
+Since the only value returned by `json.Marshal(interface{}) ([]byte, error)` is a `[]byte`, the only value that must be allocated during encoding is that return value.
+
+Also, as the number of allocations increases, performance suffers, so the number of allocations should be kept as low as possible when building the `[]byte`.
+
+Therefore, a common technique is to reduce the number of times a new buffer must be allocated by reusing the buffer from the previous encoding via `sync.Pool`.
+
+Finally, a buffer exactly as long as the result is allocated and the contents are copied into it, so in theory only that one allocation is required.
+
+```go
+type buffer struct {
+	data []byte
+}
+
+var bufPool = sync.Pool{
+	New: func() interface{} {
+		return &buffer{data: make([]byte, 0, 1024)}
+	},
+}
+
+buf := bufPool.Get().(*buffer)
+data := encode(buf.data) // reuse buf.data
+
+newBuf := make([]byte, len(data))
+copy(newBuf, data)
+
+buf.data = data
+bufPool.Put(buf)
+```
+
+### Elimination of reflection
+
+As you know, reflection is very slow.
+
+Therefore, using the fact that the address at which a type's information is stored is fixed for each binary ( we call this `typeptr` ),
+we can use that address to call a pre-built, optimized process.
+
+For example, you can obtain the address of the type information from an `interface{}` as follows and use it to call a process that does not involve reflection.
+
+To process without reflection, pass a pointer (`unsafe.Pointer`) to where the value is stored.
+
+```go
+type emptyInterface struct {
+	typ unsafe.Pointer
+	ptr unsafe.Pointer
+}
+
+var typeToEncoder = map[uintptr]func(unsafe.Pointer) ([]byte, error){}
+
+func Marshal(v interface{}) ([]byte, error) {
+	iface := (*emptyInterface)(unsafe.Pointer(&v))
+	typeptr := uintptr(iface.typ)
+	if enc, exists := typeToEncoder[typeptr]; exists {
+		return enc(iface.ptr)
+	}
+	...
+}
+```
+
+※ In reality, `typeToEncoder` can be referenced by multiple goroutines, so exclusive control is required.
+
+## Unique speed-up techniques
+
+## Encoder
+
+### Do not escape arguments of `Marshal`
+
+`json.Marshal` and `json.Unmarshal` receive an `interface{}` value and determine the type dynamically in order to process it.
+Normally you would use the `reflect` library for that dynamic type determination, but since `reflect.Type` is defined as an `interface`, calling a method on `reflect.Type` causes the argument to escape.
+
+Therefore, the arguments to `Marshal` and `Unmarshal` always escape to the heap.
+However, `go-json` can use the functionality of `reflect.Type` while avoiding this escape.
+
+`reflect.Type` is defined as an `interface`, but in reality it is implemented only by the `rtype` structure defined in the `reflect` package.
+For this reason, to date, `reflect.Type` is effectively the same as `*reflect.rtype`.
+
+Therefore, by handling `*reflect.rtype` ( the implementation of `reflect.Type` ) directly, the value is treated as a `struct` rather than an `interface`, and the escape can be avoided.
+
+The technique for working with `*reflect.rtype` directly from `go-json` is implemented in [rtype.go](https://github.com/goccy/go-json/blob/master/internal/runtime/rtype.go).
+
+The same technique has also been extracted into a standalone library ( https://github.com/goccy/go-reflect ).
+
+Initially this feature was the default behavior of `go-json`.
+But after careful testing, I found that when a large value was passed to `json.Marshal()` and the argument could not be kept on the stack, it was not properly escaped to the heap (a bug in the Go compiler).
+
+Therefore, this feature will be provided as **optional** until this issue is resolved.
+
+To use it, add `NoEscape` to the function name, as in `MarshalNoEscape()`.
+
+### Encoding using opcode sequence
+
+I explained above that you can use `typeptr` to call the pre-built process for a type.
+
+Other libraries implement this dedicated process as function calls ( for example, anonymous functions ), but function calls are inherently slow and should be avoided as much as possible.
+
+Therefore, `go-json` adopted an instruction-based execution system, the same approach used to implement virtual machines for programming languages.
+
+The first time a type is encoded, the opcode ( instruction ) sequence required for encoding is created.
+From the second time onward, `typeptr` is used to fetch the cached, pre-built opcode sequence and encoding is driven by it. An example of an opcode sequence is shown below.
+
+```go
+json.Marshal(struct{
+	X int `json:"x"`
+	Y string `json:"y"`
+}{X: 1, Y: "hello"})
+```
+
+When encoding a structure like the one above, a sequence of opcodes like this is created:
+
+```
+- opStructFieldHead ( `{` )
+- opStructFieldInt ( `"x": 1,` )
+- opStructFieldString ( `"y": "hello"` )
+- opStructEnd ( `}` )
+- opEnd
+```
+
+※ The characters shown on the right are what each operation writes when it is processed.
+
+In addition, each opcode is managed by the following structure ( pseudo code ).
+
+```go
+type opType int
+
+const (
+	opStructFieldHead opType = iota
+	opStructFieldInt
+	opStructFieldString
+	opStructEnd
+	opEnd
+)
+
+type opcode struct {
+	op     opType
+	key    []byte
+	offset uintptr
+	next   *opcode
+}
+```
+
+The process of encoding using the opcode sequence is roughly implemented as follows.
+
+```go
+func encode(code *opcode, b []byte, p unsafe.Pointer) ([]byte, error) {
+	for {
+		switch code.op {
+		case opStructFieldHead:
+			b = append(b, '{')
+			code = code.next
+		case opStructFieldInt:
+			b = append(b, code.key...)
+			b = appendInt(b, *(*int)(unsafe.Pointer(uintptr(p)+code.offset)))
+			code = code.next
+		case opStructFieldString:
+			b = append(b, code.key...)
+			b = appendString(b, *(*string)(unsafe.Pointer(uintptr(p)+code.offset)))
+			code = code.next
+		case opStructEnd:
+			b = append(b, '}')
+			code = code.next
+		case opEnd:
+			goto END
+		}
+	}
+END:
+	return b, nil
+}
+```
+
+In this way, one big `switch-case` drives encoding by walking the linked list of opcodes, avoiding unnecessary function calls.
+
+### Opcode sequence optimization
+
+One of the advantages of encoding with an opcode sequence is the ease of optimization.
+The opcode sequence shown above is actually converted into the following optimized operations before use.
+
+```
+- opStructFieldHeadInt ( `{"x": 1,` )
+- opStructEndString ( `"y": "hello"}` )
+- opEnd
+```
+
+The sequence has been reduced from 5 opcodes to 3 opcodes!
+Reducing the number of opcodes means reducing the number of `switch-case` branches.
+In other words, the closer the number of operations is to 1, the faster the processing.
+
+`go-json` both reduces the number of opcodes in this way and speeds up encoding by preparing opcodes with optimized paths.
+
+### Change recursive call from CALL to JMP
+
+Recursive processing is required during encoding if the type is defined recursively as follows:
+
+```go
+type T struct {
+	X int
+	U *U
+}
+
+type U struct {
+	T *T
+}
+
+b, err := json.Marshal(&T{
+	X: 1,
+	U: &U{
+		T: &T{
+			X: 2,
+		},
+	},
+})
+fmt.Println(string(b)) // {"X":1,"U":{"T":{"X":2,"U":null}}}
+```
+
+In `go-json`, recursion is handled by the `opStructFieldRecursive` operation type.
+
+In this operation, after acquiring the opcode sequence used for the recursive processing, the function is **not** called recursively; instead, the necessary values are saved and execution simply moves on to the next operation.
+
+Implementing recursion with a `JMP` operation while avoiding a `CALL` operation is a well-known technique for building high-speed virtual machines.
+
+For more details, please refer to [the article](https://engineering.mercari.com/blog/entry/1599563768-081104c850) ( Japanese only ).
+
+### Dispatch by typeptr from map to slice
+
+When retrieving the data cached per type via `typeptr`, a map is normally used.
+A map requires exclusive control, so a naive implementation would use `sync.Map`.
+
+However, `sync.Map` is slow, so it is better to use the `atomic` package for exclusive control, as implemented by `segmentio/encoding/json` ( https://github.com/segmentio/encoding/blob/master/json/codec.go#L41-L55 ).
+
+This implementation makes the set slower in exchange for a faster get, which works well because of the nature of the library: the same type is encoded far more often than new types are registered.
+
+However, profiling showed that `runtime.mapaccess2` still accounts for a significant percentage of the execution time. So I considered changing the lookup from a map to a slice.
+
+There is an API named `typelinks` in the `runtime` package that the `reflect` package uses internally.
+It allows you to obtain all the type information defined in the binary at runtime.
+
+Since all type information can be acquired, a slice sized to the total number of types can be constructed in advance, and lookups can be done with the value of `typeptr` without worrying about out-of-range access.
+
+However, if there is a lot of type information this uses a lot of memory, so by default this optimization is applied only if the slice fits within **2MiB**.
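+
+As a rough sketch of the idea ( all names here are hypothetical, not the actual implementation, which is linked below ), the hot path becomes a single slice index computed from the type address instead of a synchronized map lookup:
+
+```go
+package dispatch
+
+import "unsafe"
+
+// encoderFn is a pre-built, reflection-free encoder for one concrete type.
+type encoderFn func(p unsafe.Pointer) ([]byte, error)
+
+// Filled once at startup from the type information enumerated via typelinks:
+// the lowest type address, the shift implied by type-address alignment, and
+// one slot per possible type.
+var (
+	baseTypeAddr  uintptr
+	typeAddrShift uintptr
+	cachedEncoder []encoderFn
+)
+
+// lookupEncoder resolves the cached encoder with a single slice read; no
+// hashing and no locking are needed on the hot path.
+func lookupEncoder(typeptr uintptr) encoderFn {
+	return cachedEncoder[(typeptr-baseTypeAddr)>>typeAddrShift]
+}
+```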
+ +If this approach is not available, it will fall back to the `atomic` based process described above. + +If you want to know more, please refer to the implementation [here](https://github.com/goccy/go-json/blob/master/internal/runtime/type.go#L36-L100) + +## Decoder + +### Dispatch by typeptr from map to slice + +Like the encoder, the decoder also uses typeptr to call the dedicated process. + +### Faster termination character inspection using NUL character + +In order to decode, you have to traverse the input buffer character by position. +At that time, if you check whether the buffer has reached the end, it will be very slow. + +`buf` : `[]byte` type variable. holds the string passed to the decoder +`cursor` : `int64` type variable. holds the current read position + +```go +buflen := len(buf) +for ; cursor < buflen; cursor++ { // compare cursor and buflen at all times, it is so slow. + switch buf[cursor] { + case ' ', '\n', '\r', '\t': + } +} +``` + +Therefore, by adding the `NUL` (`\000`) character to the end of the read buffer as shown below, it is possible to check the termination character at the same time as other characters. + +```go +for { + switch buf[cursor] { + case ' ', '\n', '\r', '\t': + case '\000': + return nil + } + cursor++ +} +``` + +### Use Boundary Check Elimination + +Due to the `NUL` character optimization, the Go compiler does a boundary check every time, even though `buf[cursor]` does not cause out-of-range access. + +Therefore, `go-json` eliminates boundary check by fetching characters for hotspot by pointer operation. For example, the following code. + +```go +func char(ptr unsafe.Pointer, offset int64) byte { + return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset))) +} + +p := (*sliceHeader)(&unsafe.Pointer(buf)).data +for { + switch char(p, cursor) { + case ' ', '\n', '\r', '\t': + case '\000': + return nil + } + cursor++ +} +``` + +### Checking the existence of fields of struct using Bitmaps + +I found by the profiling result, in the struct decode, lookup process for field was taking a long time. + +For example, consider decoding a string like `{"a":1,"b":2,"c":3}` into the following structure: + +```go +type T struct { + A int `json:"a"` + B int `json:"b"` + C int `json:"c"` +} +``` + +At this time, it was found that it takes a lot of time to acquire the decoding process corresponding to the field from the field name as shown below during the decoding process. + +```go +fieldName := decodeKey(buf, cursor) // "a" or "b" or "c" +decoder, exists := fieldToDecoderMap[fieldName] // so slow +if exists { + decoder(buf, cursor) +} else { + skipValue(buf, cursor) +} +``` + +To improve this process, `json-iterator/go` is optimized so that it can be branched by switch-case when the number of fields in the structure is 10 or less (switch-case is faster than map). However, there is a risk of hash collision because the value hashed by the FNV algorithm is used for conditional branching. Also, `gojay` processes this part at high speed by letting the library user yourself write `switch-case`. + + +`go-json` considers and implements a new approach that is different from these. I call this **bitmap field optimization**. + +The range of values ​​per character can be represented by `[256]byte`. Also, if the number of fields in the structure is 8 or less, `int8` type can represent the state of each field. +In other words, it has the following structure. 
+ +- Base ( 8bit ): `00000000` +- Key "a": `00000001` ( assign key "a" to the first bit ) +- Key "b": `00000010` ( assign key "b" to the second bit ) +- Key "c": `00000100` ( assign key "c" to the third bit ) + +Bitmap structure is the following + +``` + | key index(0) | +------------------------ + 0 | 00000000 | + 1 | 00000000 | +~~ | | +97 (a) | 00000001 | +98 (b) | 00000010 | +99 (c) | 00000100 | +~~ | | +255 | 00000000 | +``` + +You can think of this as a Bitmap with a height of `256` and a width of the maximum string length in the field name. +In other words, it can be represented by the following type . + +```go +[maxFieldKeyLength][256]int8 +``` + +When decoding a field character, check whether the corresponding character exists by referring to the pre-built bitmap like the following. + +```go +var curBit int8 = math.MaxInt8 // 11111111 + +c := char(buf, cursor) +bit := bitmap[keyIdx][c] +curBit &= bit +if curBit == 0 { + // not found field +} +``` + +If `curBit` is not `0` until the end of the field string, then the string is +You may have hit one of the fields. +But the possibility is that if the decoded string is shorter than the field string, you will get a false hit. + +- input: `{"a":1}` +```go +type T struct { + X int `json:"abc"` +} +``` +※ Since `a` is shorter than `abc`, it can decode to the end of the field character without `curBit` being 0. + +Rest assured. In this case, it doesn't matter because you can tell if you hit by comparing the string length of `a` with the string length of `abc`. + +Finally, calculate the position of the bit where `1` is set and get the corresponding value, and you're done. + +Using this technique, field lookups are possible with only bitwise operations and access to slices. + +`go-json` uses a similar technique for fields with 9 or more and 16 or less fields. At this time, Bitmap is constructed as `[maxKeyLen][256]int16` type. + +Currently, this optimization is not performed when the maximum length of the field name is long (specifically, 64 bytes or more) in addition to the limitation of the number of fields from the viewpoint of saving memory usage. + +### Others + +I have done a lot of other optimizations. I will find time to write about them. If you have any questions about what's written here or other optimizations, please visit the `#go-json` channel on `gophers.slack.com` . + +## Reference + +Regarding the story of go-json, there are the following articles in Japanese only. + +- https://speakerdeck.com/goccy/zui-su-falsejsonraiburariwoqiu-mete +- https://engineering.mercari.com/blog/entry/1599563768-081104c850/ + +# Looking for Sponsors + +I'm looking for sponsors this library. This library is being developed as a personal project in my spare time. If you want a quick response or problem resolution when using this library in your project, please register as a [sponsor](https://github.com/sponsors/goccy). I will cooperate as much as possible. Of course, this library is developed as an MIT license, so you can use it freely for free. 
+ +# License + +MIT diff --git a/index/server/vendor/github.com/goccy/go-json/color.go b/index/server/vendor/github.com/goccy/go-json/color.go new file mode 100644 index 00000000..e80b22b4 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/color.go @@ -0,0 +1,68 @@ +package json + +import ( + "fmt" + + "github.com/goccy/go-json/internal/encoder" +) + +type ( + ColorFormat = encoder.ColorFormat + ColorScheme = encoder.ColorScheme +) + +const escape = "\x1b" + +type colorAttr int + +//nolint:deadcode,varcheck +const ( + fgBlackColor colorAttr = iota + 30 + fgRedColor + fgGreenColor + fgYellowColor + fgBlueColor + fgMagentaColor + fgCyanColor + fgWhiteColor +) + +//nolint:deadcode,varcheck +const ( + fgHiBlackColor colorAttr = iota + 90 + fgHiRedColor + fgHiGreenColor + fgHiYellowColor + fgHiBlueColor + fgHiMagentaColor + fgHiCyanColor + fgHiWhiteColor +) + +func createColorFormat(attr colorAttr) ColorFormat { + return ColorFormat{ + Header: wrapColor(attr), + Footer: resetColor(), + } +} + +func wrapColor(attr colorAttr) string { + return fmt.Sprintf("%s[%dm", escape, attr) +} + +func resetColor() string { + return wrapColor(colorAttr(0)) +} + +var ( + DefaultColorScheme = &ColorScheme{ + Int: createColorFormat(fgHiMagentaColor), + Uint: createColorFormat(fgHiMagentaColor), + Float: createColorFormat(fgHiMagentaColor), + Bool: createColorFormat(fgHiYellowColor), + String: createColorFormat(fgHiGreenColor), + Binary: createColorFormat(fgHiRedColor), + ObjectKey: createColorFormat(fgHiCyanColor), + Null: createColorFormat(fgBlueColor), + } +) diff --git a/index/server/vendor/github.com/goccy/go-json/decode.go b/index/server/vendor/github.com/goccy/go-json/decode.go new file mode 100644 index 00000000..d99749d0 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/decode.go @@ -0,0 +1,232 @@ +package json + +import ( + "context" + "fmt" + "io" + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/decoder" + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type Decoder struct { + s *decoder.Stream +} + +const ( + nul = '\000' +) + +type emptyInterface struct { + typ *runtime.Type + ptr unsafe.Pointer +} + +func unmarshal(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error { + src := make([]byte, len(data)+1) // append nul byte to the end + copy(src, data) + + header := (*emptyInterface)(unsafe.Pointer(&v)) + + if err := validateType(header.typ, uintptr(header.ptr)); err != nil { + return err + } + dec, err := decoder.CompileToGetDecoder(header.typ) + if err != nil { + return err + } + ctx := decoder.TakeRuntimeContext() + ctx.Buf = src + ctx.Option.Flags = 0 + for _, optFunc := range optFuncs { + optFunc(ctx.Option) + } + cursor, err := dec.Decode(ctx, 0, 0, header.ptr) + if err != nil { + decoder.ReleaseRuntimeContext(ctx) + return err + } + decoder.ReleaseRuntimeContext(ctx) + return validateEndBuf(src, cursor) +} + +func unmarshalContext(ctx context.Context, data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error { + src := make([]byte, len(data)+1) // append nul byte to the end + copy(src, data) + + header := (*emptyInterface)(unsafe.Pointer(&v)) + + if err := validateType(header.typ, uintptr(header.ptr)); err != nil { + return err + } + dec, err := decoder.CompileToGetDecoder(header.typ) + if err != nil { + return err + } + rctx := decoder.TakeRuntimeContext() + rctx.Buf = src + rctx.Option.Flags = 0 + 
rctx.Option.Flags |= decoder.ContextOption + rctx.Option.Context = ctx + for _, optFunc := range optFuncs { + optFunc(rctx.Option) + } + cursor, err := dec.Decode(rctx, 0, 0, header.ptr) + if err != nil { + decoder.ReleaseRuntimeContext(rctx) + return err + } + decoder.ReleaseRuntimeContext(rctx) + return validateEndBuf(src, cursor) +} + +func unmarshalNoEscape(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error { + src := make([]byte, len(data)+1) // append nul byte to the end + copy(src, data) + + header := (*emptyInterface)(unsafe.Pointer(&v)) + + if err := validateType(header.typ, uintptr(header.ptr)); err != nil { + return err + } + dec, err := decoder.CompileToGetDecoder(header.typ) + if err != nil { + return err + } + + ctx := decoder.TakeRuntimeContext() + ctx.Buf = src + ctx.Option.Flags = 0 + for _, optFunc := range optFuncs { + optFunc(ctx.Option) + } + cursor, err := dec.Decode(ctx, 0, 0, noescape(header.ptr)) + if err != nil { + decoder.ReleaseRuntimeContext(ctx) + return err + } + decoder.ReleaseRuntimeContext(ctx) + return validateEndBuf(src, cursor) +} + +func validateEndBuf(src []byte, cursor int64) error { + for { + switch src[cursor] { + case ' ', '\t', '\n', '\r': + cursor++ + continue + case nul: + return nil + } + return errors.ErrSyntax( + fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]), + cursor+1, + ) + } +} + +//nolint:staticcheck +//go:nosplit +func noescape(p unsafe.Pointer) unsafe.Pointer { + x := uintptr(p) + return unsafe.Pointer(x ^ 0) +} + +func validateType(typ *runtime.Type, p uintptr) error { + if typ == nil || typ.Kind() != reflect.Ptr || p == 0 { + return &InvalidUnmarshalError{Type: runtime.RType2Type(typ)} + } + return nil +} + +// NewDecoder returns a new decoder that reads from r. +// +// The decoder introduces its own buffering and may +// read data from r beyond the JSON values requested. +func NewDecoder(r io.Reader) *Decoder { + s := decoder.NewStream(r) + return &Decoder{ + s: s, + } +} + +// Buffered returns a reader of the data remaining in the Decoder's +// buffer. The reader is valid until the next call to Decode. +func (d *Decoder) Buffered() io.Reader { + return d.s.Buffered() +} + +// Decode reads the next JSON-encoded value from its +// input and stores it in the value pointed to by v. +// +// See the documentation for Unmarshal for details about +// the conversion of JSON into a Go value. +func (d *Decoder) Decode(v interface{}) error { + return d.DecodeWithOption(v) +} + +// DecodeContext reads the next JSON-encoded value from its +// input and stores it in the value pointed to by v with context.Context. 
+func (d *Decoder) DecodeContext(ctx context.Context, v interface{}) error { + d.s.Option.Flags |= decoder.ContextOption + d.s.Option.Context = ctx + return d.DecodeWithOption(v) +} + +func (d *Decoder) DecodeWithOption(v interface{}, optFuncs ...DecodeOptionFunc) error { + header := (*emptyInterface)(unsafe.Pointer(&v)) + typ := header.typ + ptr := uintptr(header.ptr) + typeptr := uintptr(unsafe.Pointer(typ)) + // noescape trick for header.typ ( reflect.*rtype ) + copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr)) + + if err := validateType(copiedType, ptr); err != nil { + return err + } + + dec, err := decoder.CompileToGetDecoder(typ) + if err != nil { + return err + } + if err := d.s.PrepareForDecode(); err != nil { + return err + } + s := d.s + for _, optFunc := range optFuncs { + optFunc(s.Option) + } + if err := dec.DecodeStream(s, 0, header.ptr); err != nil { + return err + } + s.Reset() + return nil +} + +func (d *Decoder) More() bool { + return d.s.More() +} + +func (d *Decoder) Token() (Token, error) { + return d.s.Token() +} + +// DisallowUnknownFields causes the Decoder to return an error when the destination +// is a struct and the input contains object keys which do not match any +// non-ignored, exported fields in the destination. +func (d *Decoder) DisallowUnknownFields() { + d.s.DisallowUnknownFields = true +} + +func (d *Decoder) InputOffset() int64 { + return d.s.TotalOffset() +} + +// UseNumber causes the Decoder to unmarshal a number into an interface{} as a +// Number instead of as a float64. +func (d *Decoder) UseNumber() { + d.s.UseNumber = true +} diff --git a/index/server/vendor/github.com/goccy/go-json/docker-compose.yml b/index/server/vendor/github.com/goccy/go-json/docker-compose.yml new file mode 100644 index 00000000..db40c79a --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/docker-compose.yml @@ -0,0 +1,13 @@ +version: '2' +services: + go-json: + image: golang:1.18 + volumes: + - '.:/go/src/go-json' + deploy: + resources: + limits: + memory: 620M + working_dir: /go/src/go-json + command: | + sh -c "go test -c . && ls go-json.test" diff --git a/index/server/vendor/github.com/goccy/go-json/encode.go b/index/server/vendor/github.com/goccy/go-json/encode.go new file mode 100644 index 00000000..4bd899f3 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/encode.go @@ -0,0 +1,326 @@ +package json + +import ( + "context" + "io" + "os" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/encoder/vm" + "github.com/goccy/go-json/internal/encoder/vm_color" + "github.com/goccy/go-json/internal/encoder/vm_color_indent" + "github.com/goccy/go-json/internal/encoder/vm_indent" +) + +// An Encoder writes JSON values to an output stream. +type Encoder struct { + w io.Writer + enabledIndent bool + enabledHTMLEscape bool + prefix string + indentStr string +} + +// NewEncoder returns a new encoder that writes to w. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{w: w, enabledHTMLEscape: true} +} + +// Encode writes the JSON encoding of v to the stream, followed by a newline character. +// +// See the documentation for Marshal for details about the conversion of Go values to JSON. +func (e *Encoder) Encode(v interface{}) error { + return e.EncodeWithOption(v) +} + +// EncodeWithOption call Encode with EncodeOption. 
+func (e *Encoder) EncodeWithOption(v interface{}, optFuncs ...EncodeOptionFunc) error { + ctx := encoder.TakeRuntimeContext() + ctx.Option.Flag = 0 + + err := e.encodeWithOption(ctx, v, optFuncs...) + + encoder.ReleaseRuntimeContext(ctx) + return err +} + +// EncodeContext call Encode with context.Context and EncodeOption. +func (e *Encoder) EncodeContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) error { + rctx := encoder.TakeRuntimeContext() + rctx.Option.Flag = 0 + rctx.Option.Flag |= encoder.ContextOption + rctx.Option.Context = ctx + + err := e.encodeWithOption(rctx, v, optFuncs...) + + encoder.ReleaseRuntimeContext(rctx) + return err +} + +func (e *Encoder) encodeWithOption(ctx *encoder.RuntimeContext, v interface{}, optFuncs ...EncodeOptionFunc) error { + if e.enabledHTMLEscape { + ctx.Option.Flag |= encoder.HTMLEscapeOption + } + ctx.Option.Flag |= encoder.NormalizeUTF8Option + ctx.Option.DebugOut = os.Stdout + for _, optFunc := range optFuncs { + optFunc(ctx.Option) + } + var ( + buf []byte + err error + ) + if e.enabledIndent { + buf, err = encodeIndent(ctx, v, e.prefix, e.indentStr) + } else { + buf, err = encode(ctx, v) + } + if err != nil { + return err + } + if e.enabledIndent { + buf = buf[:len(buf)-2] + } else { + buf = buf[:len(buf)-1] + } + buf = append(buf, '\n') + if _, err := e.w.Write(buf); err != nil { + return err + } + return nil +} + +// SetEscapeHTML specifies whether problematic HTML characters should be escaped inside JSON quoted strings. +// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e to avoid certain safety problems that can arise when embedding JSON in HTML. +// +// In non-HTML settings where the escaping interferes with the readability of the output, SetEscapeHTML(false) disables this behavior. +func (e *Encoder) SetEscapeHTML(on bool) { + e.enabledHTMLEscape = on +} + +// SetIndent instructs the encoder to format each subsequent encoded value as if indented by the package-level function Indent(dst, src, prefix, indent). +// Calling SetIndent("", "") disables indentation. +func (e *Encoder) SetIndent(prefix, indent string) { + if prefix == "" && indent == "" { + e.enabledIndent = false + return + } + e.prefix = prefix + e.indentStr = indent + e.enabledIndent = true +} + +func marshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) { + rctx := encoder.TakeRuntimeContext() + rctx.Option.Flag = 0 + rctx.Option.Flag = encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option | encoder.ContextOption + rctx.Option.Context = ctx + for _, optFunc := range optFuncs { + optFunc(rctx.Option) + } + + buf, err := encode(rctx, v) + if err != nil { + encoder.ReleaseRuntimeContext(rctx) + return nil, err + } + + // this line exists to escape call of `runtime.makeslicecopy` . + // if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`, + // dst buffer size and src buffer size are differrent. + // in this case, compiler uses `runtime.makeslicecopy`, but it is slow. 
+ buf = buf[:len(buf)-1] + copied := make([]byte, len(buf)) + copy(copied, buf) + + encoder.ReleaseRuntimeContext(rctx) + return copied, nil +} + +func marshal(v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) { + ctx := encoder.TakeRuntimeContext() + + ctx.Option.Flag = 0 + ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option) + for _, optFunc := range optFuncs { + optFunc(ctx.Option) + } + + buf, err := encode(ctx, v) + if err != nil { + encoder.ReleaseRuntimeContext(ctx) + return nil, err + } + + // this line exists to escape call of `runtime.makeslicecopy` . + // if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`, + // dst buffer size and src buffer size are differrent. + // in this case, compiler uses `runtime.makeslicecopy`, but it is slow. + buf = buf[:len(buf)-1] + copied := make([]byte, len(buf)) + copy(copied, buf) + + encoder.ReleaseRuntimeContext(ctx) + return copied, nil +} + +func marshalNoEscape(v interface{}) ([]byte, error) { + ctx := encoder.TakeRuntimeContext() + + ctx.Option.Flag = 0 + ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option) + + buf, err := encodeNoEscape(ctx, v) + if err != nil { + encoder.ReleaseRuntimeContext(ctx) + return nil, err + } + + // this line exists to escape call of `runtime.makeslicecopy` . + // if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`, + // dst buffer size and src buffer size are differrent. + // in this case, compiler uses `runtime.makeslicecopy`, but it is slow. + buf = buf[:len(buf)-1] + copied := make([]byte, len(buf)) + copy(copied, buf) + + encoder.ReleaseRuntimeContext(ctx) + return copied, nil +} + +func marshalIndent(v interface{}, prefix, indent string, optFuncs ...EncodeOptionFunc) ([]byte, error) { + ctx := encoder.TakeRuntimeContext() + + ctx.Option.Flag = 0 + ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option | encoder.IndentOption) + for _, optFunc := range optFuncs { + optFunc(ctx.Option) + } + + buf, err := encodeIndent(ctx, v, prefix, indent) + if err != nil { + encoder.ReleaseRuntimeContext(ctx) + return nil, err + } + + buf = buf[:len(buf)-2] + copied := make([]byte, len(buf)) + copy(copied, buf) + + encoder.ReleaseRuntimeContext(ctx) + return copied, nil +} + +func encode(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) { + b := ctx.Buf[:0] + if v == nil { + b = encoder.AppendNull(ctx, b) + b = encoder.AppendComma(ctx, b) + return b, nil + } + header := (*emptyInterface)(unsafe.Pointer(&v)) + typ := header.typ + + typeptr := uintptr(unsafe.Pointer(typ)) + codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr) + if err != nil { + return nil, err + } + + p := uintptr(header.ptr) + ctx.Init(p, codeSet.CodeLength) + ctx.KeepRefs = append(ctx.KeepRefs, header.ptr) + + buf, err := encodeRunCode(ctx, b, codeSet) + if err != nil { + return nil, err + } + ctx.Buf = buf + return buf, nil +} + +func encodeNoEscape(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) { + b := ctx.Buf[:0] + if v == nil { + b = encoder.AppendNull(ctx, b) + b = encoder.AppendComma(ctx, b) + return b, nil + } + header := (*emptyInterface)(unsafe.Pointer(&v)) + typ := header.typ + + typeptr := uintptr(unsafe.Pointer(typ)) + codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr) + if err != nil { + return nil, err + } + + p := uintptr(header.ptr) + ctx.Init(p, codeSet.CodeLength) + buf, err := encodeRunCode(ctx, b, codeSet) + if err != nil { + return nil, err + } + + ctx.Buf = buf + return buf, nil +} + +func encodeIndent(ctx 
*encoder.RuntimeContext, v interface{}, prefix, indent string) ([]byte, error) { + b := ctx.Buf[:0] + if v == nil { + b = encoder.AppendNull(ctx, b) + b = encoder.AppendCommaIndent(ctx, b) + return b, nil + } + header := (*emptyInterface)(unsafe.Pointer(&v)) + typ := header.typ + + typeptr := uintptr(unsafe.Pointer(typ)) + codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr) + if err != nil { + return nil, err + } + + p := uintptr(header.ptr) + ctx.Init(p, codeSet.CodeLength) + buf, err := encodeRunIndentCode(ctx, b, codeSet, prefix, indent) + + ctx.KeepRefs = append(ctx.KeepRefs, header.ptr) + + if err != nil { + return nil, err + } + + ctx.Buf = buf + return buf, nil +} + +func encodeRunCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + if (ctx.Option.Flag & encoder.DebugOption) != 0 { + if (ctx.Option.Flag & encoder.ColorizeOption) != 0 { + return vm_color.DebugRun(ctx, b, codeSet) + } + return vm.DebugRun(ctx, b, codeSet) + } + if (ctx.Option.Flag & encoder.ColorizeOption) != 0 { + return vm_color.Run(ctx, b, codeSet) + } + return vm.Run(ctx, b, codeSet) +} + +func encodeRunIndentCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet, prefix, indent string) ([]byte, error) { + ctx.Prefix = []byte(prefix) + ctx.IndentStr = []byte(indent) + if (ctx.Option.Flag & encoder.DebugOption) != 0 { + if (ctx.Option.Flag & encoder.ColorizeOption) != 0 { + return vm_color_indent.DebugRun(ctx, b, codeSet) + } + return vm_indent.DebugRun(ctx, b, codeSet) + } + if (ctx.Option.Flag & encoder.ColorizeOption) != 0 { + return vm_color_indent.Run(ctx, b, codeSet) + } + return vm_indent.Run(ctx, b, codeSet) +} diff --git a/index/server/vendor/github.com/goccy/go-json/error.go b/index/server/vendor/github.com/goccy/go-json/error.go new file mode 100644 index 00000000..94c1339a --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/error.go @@ -0,0 +1,39 @@ +package json + +import ( + "github.com/goccy/go-json/internal/errors" +) + +// Before Go 1.2, an InvalidUTF8Error was returned by Marshal when +// attempting to encode a string value with invalid UTF-8 sequences. +// As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by +// replacing invalid bytes with the Unicode replacement rune U+FFFD. +// +// Deprecated: No longer used; kept for compatibility. +type InvalidUTF8Error = errors.InvalidUTF8Error + +// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal. +// (The argument to Unmarshal must be a non-nil pointer.) +type InvalidUnmarshalError = errors.InvalidUnmarshalError + +// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method. +type MarshalerError = errors.MarshalerError + +// A SyntaxError is a description of a JSON syntax error. +type SyntaxError = errors.SyntaxError + +// An UnmarshalFieldError describes a JSON object key that +// led to an unexported (and therefore unwritable) struct field. +// +// Deprecated: No longer used; kept for compatibility. +type UnmarshalFieldError = errors.UnmarshalFieldError + +// An UnmarshalTypeError describes a JSON value that was +// not appropriate for a value of a specific Go type. +type UnmarshalTypeError = errors.UnmarshalTypeError + +// An UnsupportedTypeError is returned by Marshal when attempting +// to encode an unsupported value type. 
+type UnsupportedTypeError = errors.UnsupportedTypeError + +type UnsupportedValueError = errors.UnsupportedValueError diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go new file mode 100644 index 00000000..030cb7a9 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go @@ -0,0 +1,37 @@ +package decoder + +import ( + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +type anonymousFieldDecoder struct { + structType *runtime.Type + offset uintptr + dec Decoder +} + +func newAnonymousFieldDecoder(structType *runtime.Type, offset uintptr, dec Decoder) *anonymousFieldDecoder { + return &anonymousFieldDecoder{ + structType: structType, + offset: offset, + dec: dec, + } +} + +func (d *anonymousFieldDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + if *(*unsafe.Pointer)(p) == nil { + *(*unsafe.Pointer)(p) = unsafe_New(d.structType) + } + p = *(*unsafe.Pointer)(p) + return d.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+d.offset)) +} + +func (d *anonymousFieldDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + if *(*unsafe.Pointer)(p) == nil { + *(*unsafe.Pointer)(p) = unsafe_New(d.structType) + } + p = *(*unsafe.Pointer)(p) + return d.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+d.offset)) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/array.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/array.go new file mode 100644 index 00000000..21f1fd58 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/array.go @@ -0,0 +1,169 @@ +package decoder + +import ( + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type arrayDecoder struct { + elemType *runtime.Type + size uintptr + valueDecoder Decoder + alen int + structName string + fieldName string + zeroValue unsafe.Pointer +} + +func newArrayDecoder(dec Decoder, elemType *runtime.Type, alen int, structName, fieldName string) *arrayDecoder { + zeroValue := *(*unsafe.Pointer)(unsafe_New(elemType)) + return &arrayDecoder{ + valueDecoder: dec, + elemType: elemType, + size: elemType.Size(), + alen: alen, + structName: structName, + fieldName: fieldName, + zeroValue: zeroValue, + } +} + +func (d *arrayDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + + for { + switch s.char() { + case ' ', '\n', '\t', '\r': + case 'n': + if err := nullBytes(s); err != nil { + return err + } + return nil + case '[': + idx := 0 + s.cursor++ + if s.skipWhiteSpace() == ']' { + for idx < d.alen { + *(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue + idx++ + } + s.cursor++ + return nil + } + for { + if idx < d.alen { + if err := d.valueDecoder.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size)); err != nil { + return err + } + } else { + if err := s.skipValue(depth); err != nil { + return err + } + } + idx++ + switch s.skipWhiteSpace() { + case ']': + for idx < d.alen { + *(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue + idx++ + } + s.cursor++ + return nil + case ',': + s.cursor++ + continue + case 
nul: + if s.read() { + s.cursor++ + continue + } + goto ERROR + default: + goto ERROR + } + } + case nul: + if s.read() { + continue + } + goto ERROR + default: + goto ERROR + } + s.cursor++ + } +ERROR: + return errors.ErrUnexpectedEndOfJSON("array", s.totalOffset()) +} + +func (d *arrayDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + + for { + switch buf[cursor] { + case ' ', '\n', '\t', '\r': + cursor++ + continue + case 'n': + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + return cursor, nil + case '[': + idx := 0 + cursor++ + cursor = skipWhiteSpace(buf, cursor) + if buf[cursor] == ']' { + for idx < d.alen { + *(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue + idx++ + } + cursor++ + return cursor, nil + } + for { + if idx < d.alen { + c, err := d.valueDecoder.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size)) + if err != nil { + return 0, err + } + cursor = c + } else { + c, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + cursor = c + } + idx++ + cursor = skipWhiteSpace(buf, cursor) + switch buf[cursor] { + case ']': + for idx < d.alen { + *(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue + idx++ + } + cursor++ + return cursor, nil + case ',': + cursor++ + continue + default: + return 0, errors.ErrInvalidCharacter(buf[cursor], "array", cursor) + } + } + default: + return 0, errors.ErrUnexpectedEndOfJSON("array", cursor) + } + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/bool.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/bool.go new file mode 100644 index 00000000..455042a5 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/bool.go @@ -0,0 +1,78 @@ +package decoder + +import ( + "unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +type boolDecoder struct { + structName string + fieldName string +} + +func newBoolDecoder(structName, fieldName string) *boolDecoder { + return &boolDecoder{structName: structName, fieldName: fieldName} +} + +func (d *boolDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + c := s.skipWhiteSpace() + for { + switch c { + case 't': + if err := trueBytes(s); err != nil { + return err + } + **(**bool)(unsafe.Pointer(&p)) = true + return nil + case 'f': + if err := falseBytes(s); err != nil { + return err + } + **(**bool)(unsafe.Pointer(&p)) = false + return nil + case 'n': + if err := nullBytes(s); err != nil { + return err + } + return nil + case nul: + if s.read() { + c = s.char() + continue + } + goto ERROR + } + break + } +ERROR: + return errors.ErrUnexpectedEndOfJSON("bool", s.totalOffset()) +} + +func (d *boolDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + cursor = skipWhiteSpace(buf, cursor) + switch buf[cursor] { + case 't': + if err := validateTrue(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + **(**bool)(unsafe.Pointer(&p)) = true + return cursor, nil + case 'f': + if err := validateFalse(buf, cursor); err != nil { + return 0, err + } + cursor += 5 + **(**bool)(unsafe.Pointer(&p)) = false + return cursor, nil + case 'n': + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor 
+= 4 + return cursor, nil + } + return 0, errors.ErrUnexpectedEndOfJSON("bool", cursor) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/bytes.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/bytes.go new file mode 100644 index 00000000..92c7dcf6 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/bytes.go @@ -0,0 +1,113 @@ +package decoder + +import ( + "encoding/base64" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type bytesDecoder struct { + typ *runtime.Type + sliceDecoder Decoder + stringDecoder *stringDecoder + structName string + fieldName string +} + +func byteUnmarshalerSliceDecoder(typ *runtime.Type, structName string, fieldName string) Decoder { + var unmarshalDecoder Decoder + switch { + case runtime.PtrTo(typ).Implements(unmarshalJSONType): + unmarshalDecoder = newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName) + case runtime.PtrTo(typ).Implements(unmarshalTextType): + unmarshalDecoder = newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName) + default: + unmarshalDecoder, _ = compileUint8(typ, structName, fieldName) + } + return newSliceDecoder(unmarshalDecoder, typ, 1, structName, fieldName) +} + +func newBytesDecoder(typ *runtime.Type, structName string, fieldName string) *bytesDecoder { + return &bytesDecoder{ + typ: typ, + sliceDecoder: byteUnmarshalerSliceDecoder(typ, structName, fieldName), + stringDecoder: newStringDecoder(structName, fieldName), + structName: structName, + fieldName: fieldName, + } +} + +func (d *bytesDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + bytes, err := d.decodeStreamBinary(s, depth, p) + if err != nil { + return err + } + if bytes == nil { + s.reset() + return nil + } + decodedLen := base64.StdEncoding.DecodedLen(len(bytes)) + buf := make([]byte, decodedLen) + n, err := base64.StdEncoding.Decode(buf, bytes) + if err != nil { + return err + } + *(*[]byte)(p) = buf[:n] + s.reset() + return nil +} + +func (d *bytesDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + bytes, c, err := d.decodeBinary(ctx, cursor, depth, p) + if err != nil { + return 0, err + } + if bytes == nil { + return c, nil + } + cursor = c + decodedLen := base64.StdEncoding.DecodedLen(len(bytes)) + b := make([]byte, decodedLen) + n, err := base64.StdEncoding.Decode(b, bytes) + if err != nil { + return 0, err + } + *(*[]byte)(p) = b[:n] + return cursor, nil +} + +func (d *bytesDecoder) decodeStreamBinary(s *Stream, depth int64, p unsafe.Pointer) ([]byte, error) { + c := s.skipWhiteSpace() + if c == '[' { + if d.sliceDecoder == nil { + return nil, &errors.UnmarshalTypeError{ + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + } + err := d.sliceDecoder.DecodeStream(s, depth, p) + return nil, err + } + return d.stringDecoder.decodeStreamByte(s) +} + +func (d *bytesDecoder) decodeBinary(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) ([]byte, int64, error) { + buf := ctx.Buf + cursor = skipWhiteSpace(buf, cursor) + if buf[cursor] == '[' { + if d.sliceDecoder == nil { + return nil, 0, &errors.UnmarshalTypeError{ + Type: runtime.RType2Type(d.typ), + Offset: cursor, + } + } + c, err := d.sliceDecoder.Decode(ctx, cursor, depth, p) + if err != nil { + return nil, 0, err + } + return nil, c, nil + } + return d.stringDecoder.decodeByte(buf, cursor) +} diff --git 
a/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile.go new file mode 100644 index 00000000..fab64376 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile.go @@ -0,0 +1,487 @@ +package decoder + +import ( + "encoding/json" + "fmt" + "reflect" + "strings" + "sync/atomic" + "unicode" + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +var ( + jsonNumberType = reflect.TypeOf(json.Number("")) + typeAddr *runtime.TypeAddr + cachedDecoderMap unsafe.Pointer // map[uintptr]decoder + cachedDecoder []Decoder +) + +func init() { + typeAddr = runtime.AnalyzeTypeAddr() + if typeAddr == nil { + typeAddr = &runtime.TypeAddr{} + } + cachedDecoder = make([]Decoder, typeAddr.AddrRange>>typeAddr.AddrShift+1) +} + +func loadDecoderMap() map[uintptr]Decoder { + p := atomic.LoadPointer(&cachedDecoderMap) + return *(*map[uintptr]Decoder)(unsafe.Pointer(&p)) +} + +func storeDecoder(typ uintptr, dec Decoder, m map[uintptr]Decoder) { + newDecoderMap := make(map[uintptr]Decoder, len(m)+1) + newDecoderMap[typ] = dec + + for k, v := range m { + newDecoderMap[k] = v + } + + atomic.StorePointer(&cachedDecoderMap, *(*unsafe.Pointer)(unsafe.Pointer(&newDecoderMap))) +} + +func compileToGetDecoderSlowPath(typeptr uintptr, typ *runtime.Type) (Decoder, error) { + decoderMap := loadDecoderMap() + if dec, exists := decoderMap[typeptr]; exists { + return dec, nil + } + + dec, err := compileHead(typ, map[uintptr]Decoder{}) + if err != nil { + return nil, err + } + storeDecoder(typeptr, dec, decoderMap) + return dec, nil +} + +func compileHead(typ *runtime.Type, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + switch { + case implementsUnmarshalJSONType(runtime.PtrTo(typ)): + return newUnmarshalJSONDecoder(runtime.PtrTo(typ), "", ""), nil + case runtime.PtrTo(typ).Implements(unmarshalTextType): + return newUnmarshalTextDecoder(runtime.PtrTo(typ), "", ""), nil + } + return compile(typ.Elem(), "", "", structTypeToDecoder) +} + +func compile(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + switch { + case implementsUnmarshalJSONType(runtime.PtrTo(typ)): + return newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName), nil + case runtime.PtrTo(typ).Implements(unmarshalTextType): + return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil + } + + switch typ.Kind() { + case reflect.Ptr: + return compilePtr(typ, structName, fieldName, structTypeToDecoder) + case reflect.Struct: + return compileStruct(typ, structName, fieldName, structTypeToDecoder) + case reflect.Slice: + elem := typ.Elem() + if elem.Kind() == reflect.Uint8 { + return compileBytes(elem, structName, fieldName) + } + return compileSlice(typ, structName, fieldName, structTypeToDecoder) + case reflect.Array: + return compileArray(typ, structName, fieldName, structTypeToDecoder) + case reflect.Map: + return compileMap(typ, structName, fieldName, structTypeToDecoder) + case reflect.Interface: + return compileInterface(typ, structName, fieldName) + case reflect.Uintptr: + return compileUint(typ, structName, fieldName) + case reflect.Int: + return compileInt(typ, structName, fieldName) + case reflect.Int8: + return compileInt8(typ, structName, fieldName) + case reflect.Int16: + return compileInt16(typ, structName, fieldName) + case reflect.Int32: + return compileInt32(typ, structName, fieldName) + 
case reflect.Int64: + return compileInt64(typ, structName, fieldName) + case reflect.Uint: + return compileUint(typ, structName, fieldName) + case reflect.Uint8: + return compileUint8(typ, structName, fieldName) + case reflect.Uint16: + return compileUint16(typ, structName, fieldName) + case reflect.Uint32: + return compileUint32(typ, structName, fieldName) + case reflect.Uint64: + return compileUint64(typ, structName, fieldName) + case reflect.String: + return compileString(typ, structName, fieldName) + case reflect.Bool: + return compileBool(structName, fieldName) + case reflect.Float32: + return compileFloat32(structName, fieldName) + case reflect.Float64: + return compileFloat64(structName, fieldName) + case reflect.Func: + return compileFunc(typ, structName, fieldName) + } + return newInvalidDecoder(typ, structName, fieldName), nil +} + +func isStringTagSupportedType(typ *runtime.Type) bool { + switch { + case implementsUnmarshalJSONType(runtime.PtrTo(typ)): + return false + case runtime.PtrTo(typ).Implements(unmarshalTextType): + return false + } + switch typ.Kind() { + case reflect.Map: + return false + case reflect.Slice: + return false + case reflect.Array: + return false + case reflect.Struct: + return false + case reflect.Interface: + return false + } + return true +} + +func compileMapKey(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + if runtime.PtrTo(typ).Implements(unmarshalTextType) { + return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil + } + if typ.Kind() == reflect.String { + return newStringDecoder(structName, fieldName), nil + } + dec, err := compile(typ, structName, fieldName, structTypeToDecoder) + if err != nil { + return nil, err + } + for { + switch t := dec.(type) { + case *stringDecoder, *interfaceDecoder: + return dec, nil + case *boolDecoder, *intDecoder, *uintDecoder, *numberDecoder: + return newWrappedStringDecoder(typ, dec, structName, fieldName), nil + case *ptrDecoder: + dec = t.dec + default: + return newInvalidDecoder(typ, structName, fieldName), nil + } + } +} + +func compilePtr(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + dec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder) + if err != nil { + return nil, err + } + return newPtrDecoder(dec, typ.Elem(), structName, fieldName), nil +} + +func compileInt(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) { + *(*int)(p) = int(v) + }), nil +} + +func compileInt8(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) { + *(*int8)(p) = int8(v) + }), nil +} + +func compileInt16(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) { + *(*int16)(p) = int16(v) + }), nil +} + +func compileInt32(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) { + *(*int32)(p) = int32(v) + }), nil +} + +func compileInt64(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) { + *(*int64)(p) = v + }), nil +} + +func compileUint(typ *runtime.Type, structName, fieldName 
string) (Decoder, error) { + return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) { + *(*uint)(p) = uint(v) + }), nil +} + +func compileUint8(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) { + *(*uint8)(p) = uint8(v) + }), nil +} + +func compileUint16(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) { + *(*uint16)(p) = uint16(v) + }), nil +} + +func compileUint32(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) { + *(*uint32)(p) = uint32(v) + }), nil +} + +func compileUint64(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) { + *(*uint64)(p) = v + }), nil +} + +func compileFloat32(structName, fieldName string) (Decoder, error) { + return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) { + *(*float32)(p) = float32(v) + }), nil +} + +func compileFloat64(structName, fieldName string) (Decoder, error) { + return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) { + *(*float64)(p) = v + }), nil +} + +func compileString(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + if typ == runtime.Type2RType(jsonNumberType) { + return newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) { + *(*json.Number)(p) = v + }), nil + } + return newStringDecoder(structName, fieldName), nil +} + +func compileBool(structName, fieldName string) (Decoder, error) { + return newBoolDecoder(structName, fieldName), nil +} + +func compileBytes(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newBytesDecoder(typ, structName, fieldName), nil +} + +func compileSlice(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + elem := typ.Elem() + decoder, err := compile(elem, structName, fieldName, structTypeToDecoder) + if err != nil { + return nil, err + } + return newSliceDecoder(decoder, elem, elem.Size(), structName, fieldName), nil +} + +func compileArray(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + elem := typ.Elem() + decoder, err := compile(elem, structName, fieldName, structTypeToDecoder) + if err != nil { + return nil, err + } + return newArrayDecoder(decoder, elem, typ.Len(), structName, fieldName), nil +} + +func compileMap(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + keyDec, err := compileMapKey(typ.Key(), structName, fieldName, structTypeToDecoder) + if err != nil { + return nil, err + } + valueDec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder) + if err != nil { + return nil, err + } + return newMapDecoder(typ, typ.Key(), keyDec, typ.Elem(), valueDec, structName, fieldName), nil +} + +func compileInterface(typ *runtime.Type, structName, fieldName string) (Decoder, error) { + return newInterfaceDecoder(typ, structName, fieldName), nil +} + +func compileFunc(typ *runtime.Type, strutName, fieldName string) (Decoder, error) { + return newFuncDecoder(typ, strutName, fieldName), nil +} + +func typeToStructTags(typ *runtime.Type) runtime.StructTags { + tags := 
runtime.StructTags{} + fieldNum := typ.NumField() + for i := 0; i < fieldNum; i++ { + field := typ.Field(i) + if runtime.IsIgnoredStructField(field) { + continue + } + tags = append(tags, runtime.StructTagFromField(field)) + } + return tags +} + +func compileStruct(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) { + fieldNum := typ.NumField() + fieldMap := map[string]*structFieldSet{} + typeptr := uintptr(unsafe.Pointer(typ)) + if dec, exists := structTypeToDecoder[typeptr]; exists { + return dec, nil + } + structDec := newStructDecoder(structName, fieldName, fieldMap) + structTypeToDecoder[typeptr] = structDec + structName = typ.Name() + tags := typeToStructTags(typ) + allFields := []*structFieldSet{} + for i := 0; i < fieldNum; i++ { + field := typ.Field(i) + if runtime.IsIgnoredStructField(field) { + continue + } + isUnexportedField := unicode.IsLower([]rune(field.Name)[0]) + tag := runtime.StructTagFromField(field) + dec, err := compile(runtime.Type2RType(field.Type), structName, field.Name, structTypeToDecoder) + if err != nil { + return nil, err + } + if field.Anonymous && !tag.IsTaggedKey { + if stDec, ok := dec.(*structDecoder); ok { + if runtime.Type2RType(field.Type) == typ { + // recursive definition + continue + } + for k, v := range stDec.fieldMap { + if tags.ExistsKey(k) { + continue + } + fieldSet := &structFieldSet{ + dec: v.dec, + offset: field.Offset + v.offset, + isTaggedKey: v.isTaggedKey, + key: k, + keyLen: int64(len(k)), + } + allFields = append(allFields, fieldSet) + } + } else if pdec, ok := dec.(*ptrDecoder); ok { + contentDec := pdec.contentDecoder() + if pdec.typ == typ { + // recursive definition + continue + } + var fieldSetErr error + if isUnexportedField { + fieldSetErr = fmt.Errorf( + "json: cannot set embedded pointer to unexported struct: %v", + field.Type.Elem(), + ) + } + if dec, ok := contentDec.(*structDecoder); ok { + for k, v := range dec.fieldMap { + if tags.ExistsKey(k) { + continue + } + fieldSet := &structFieldSet{ + dec: newAnonymousFieldDecoder(pdec.typ, v.offset, v.dec), + offset: field.Offset, + isTaggedKey: v.isTaggedKey, + key: k, + keyLen: int64(len(k)), + err: fieldSetErr, + } + allFields = append(allFields, fieldSet) + } + } else { + fieldSet := &structFieldSet{ + dec: pdec, + offset: field.Offset, + isTaggedKey: tag.IsTaggedKey, + key: field.Name, + keyLen: int64(len(field.Name)), + } + allFields = append(allFields, fieldSet) + } + } else { + fieldSet := &structFieldSet{ + dec: dec, + offset: field.Offset, + isTaggedKey: tag.IsTaggedKey, + key: field.Name, + keyLen: int64(len(field.Name)), + } + allFields = append(allFields, fieldSet) + } + } else { + if tag.IsString && isStringTagSupportedType(runtime.Type2RType(field.Type)) { + dec = newWrappedStringDecoder(runtime.Type2RType(field.Type), dec, structName, field.Name) + } + var key string + if tag.Key != "" { + key = tag.Key + } else { + key = field.Name + } + fieldSet := &structFieldSet{ + dec: dec, + offset: field.Offset, + isTaggedKey: tag.IsTaggedKey, + key: key, + keyLen: int64(len(key)), + } + allFields = append(allFields, fieldSet) + } + } + for _, set := range filterDuplicatedFields(allFields) { + fieldMap[set.key] = set + lower := strings.ToLower(set.key) + if _, exists := fieldMap[lower]; !exists { + // first win + fieldMap[lower] = set + } + } + delete(structTypeToDecoder, typeptr) + structDec.tryOptimize() + return structDec, nil +} + +func filterDuplicatedFields(allFields []*structFieldSet) []*structFieldSet { + 
fieldMap := map[string][]*structFieldSet{} + for _, field := range allFields { + fieldMap[field.key] = append(fieldMap[field.key], field) + } + duplicatedFieldMap := map[string]struct{}{} + for k, sets := range fieldMap { + sets = filterFieldSets(sets) + if len(sets) != 1 { + duplicatedFieldMap[k] = struct{}{} + } + } + + filtered := make([]*structFieldSet, 0, len(allFields)) + for _, field := range allFields { + if _, exists := duplicatedFieldMap[field.key]; exists { + continue + } + filtered = append(filtered, field) + } + return filtered +} + +func filterFieldSets(sets []*structFieldSet) []*structFieldSet { + if len(sets) == 1 { + return sets + } + filtered := make([]*structFieldSet, 0, len(sets)) + for _, set := range sets { + if set.isTaggedKey { + filtered = append(filtered, set) + } + } + return filtered +} + +func implementsUnmarshalJSONType(typ *runtime.Type) bool { + return typ.Implements(unmarshalJSONType) || typ.Implements(unmarshalJSONContextType) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_norace.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_norace.go new file mode 100644 index 00000000..eb7e2b13 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_norace.go @@ -0,0 +1,29 @@ +//go:build !race +// +build !race + +package decoder + +import ( + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) { + typeptr := uintptr(unsafe.Pointer(typ)) + if typeptr > typeAddr.MaxTypeAddr { + return compileToGetDecoderSlowPath(typeptr, typ) + } + + index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift + if dec := cachedDecoder[index]; dec != nil { + return dec, nil + } + + dec, err := compileHead(typ, map[uintptr]Decoder{}) + if err != nil { + return nil, err + } + cachedDecoder[index] = dec + return dec, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_race.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_race.go new file mode 100644 index 00000000..49cdda4a --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/compile_race.go @@ -0,0 +1,37 @@ +//go:build race +// +build race + +package decoder + +import ( + "sync" + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +var decMu sync.RWMutex + +func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) { + typeptr := uintptr(unsafe.Pointer(typ)) + if typeptr > typeAddr.MaxTypeAddr { + return compileToGetDecoderSlowPath(typeptr, typ) + } + + index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift + decMu.RLock() + if dec := cachedDecoder[index]; dec != nil { + decMu.RUnlock() + return dec, nil + } + decMu.RUnlock() + + dec, err := compileHead(typ, map[uintptr]Decoder{}) + if err != nil { + return nil, err + } + decMu.Lock() + cachedDecoder[index] = dec + decMu.Unlock() + return dec, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/context.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/context.go new file mode 100644 index 00000000..cb2ffdaf --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/context.go @@ -0,0 +1,254 @@ +package decoder + +import ( + "sync" + "unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +type RuntimeContext struct { + Buf []byte + 
Option *Option +} + +var ( + runtimeContextPool = sync.Pool{ + New: func() interface{} { + return &RuntimeContext{ + Option: &Option{}, + } + }, + } +) + +func TakeRuntimeContext() *RuntimeContext { + return runtimeContextPool.Get().(*RuntimeContext) +} + +func ReleaseRuntimeContext(ctx *RuntimeContext) { + runtimeContextPool.Put(ctx) +} + +var ( + isWhiteSpace = [256]bool{} +) + +func init() { + isWhiteSpace[' '] = true + isWhiteSpace['\n'] = true + isWhiteSpace['\t'] = true + isWhiteSpace['\r'] = true +} + +func char(ptr unsafe.Pointer, offset int64) byte { + return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset))) +} + +func skipWhiteSpace(buf []byte, cursor int64) int64 { + for isWhiteSpace[buf[cursor]] { + cursor++ + } + return cursor +} + +func skipObject(buf []byte, cursor, depth int64) (int64, error) { + braceCount := 1 + for { + switch buf[cursor] { + case '{': + braceCount++ + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + case '}': + depth-- + braceCount-- + if braceCount == 0 { + return cursor + 1, nil + } + case '[': + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + case ']': + depth-- + case '"': + for { + cursor++ + switch buf[cursor] { + case '\\': + cursor++ + if buf[cursor] == nul { + return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + case '"': + goto SWITCH_OUT + case nul: + return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + } + case nul: + return 0, errors.ErrUnexpectedEndOfJSON("object of object", cursor) + } + SWITCH_OUT: + cursor++ + } +} + +func skipArray(buf []byte, cursor, depth int64) (int64, error) { + bracketCount := 1 + for { + switch buf[cursor] { + case '[': + bracketCount++ + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + case ']': + bracketCount-- + depth-- + if bracketCount == 0 { + return cursor + 1, nil + } + case '{': + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + case '}': + depth-- + case '"': + for { + cursor++ + switch buf[cursor] { + case '\\': + cursor++ + if buf[cursor] == nul { + return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + case '"': + goto SWITCH_OUT + case nul: + return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + } + case nul: + return 0, errors.ErrUnexpectedEndOfJSON("array of object", cursor) + } + SWITCH_OUT: + cursor++ + } +} + +func skipValue(buf []byte, cursor, depth int64) (int64, error) { + for { + switch buf[cursor] { + case ' ', '\t', '\n', '\r': + cursor++ + continue + case '{': + return skipObject(buf, cursor+1, depth+1) + case '[': + return skipArray(buf, cursor+1, depth+1) + case '"': + for { + cursor++ + switch buf[cursor] { + case '\\': + cursor++ + if buf[cursor] == nul { + return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + case '"': + return cursor + 1, nil + case nul: + return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + for { + cursor++ + if floatTable[buf[cursor]] { + continue + } + break + } + return cursor, nil + case 't': + if err := validateTrue(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + return cursor, nil + case 'f': + if err := validateFalse(buf, cursor); err != nil { + return 0, err + } + cursor += 5 + return cursor, nil + case 
'n': + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + return cursor, nil + default: + return cursor, errors.ErrUnexpectedEndOfJSON("null", cursor) + } + } +} + +func validateTrue(buf []byte, cursor int64) error { + if cursor+3 >= int64(len(buf)) { + return errors.ErrUnexpectedEndOfJSON("true", cursor) + } + if buf[cursor+1] != 'r' { + return errors.ErrInvalidCharacter(buf[cursor+1], "true", cursor) + } + if buf[cursor+2] != 'u' { + return errors.ErrInvalidCharacter(buf[cursor+2], "true", cursor) + } + if buf[cursor+3] != 'e' { + return errors.ErrInvalidCharacter(buf[cursor+3], "true", cursor) + } + return nil +} + +func validateFalse(buf []byte, cursor int64) error { + if cursor+4 >= int64(len(buf)) { + return errors.ErrUnexpectedEndOfJSON("false", cursor) + } + if buf[cursor+1] != 'a' { + return errors.ErrInvalidCharacter(buf[cursor+1], "false", cursor) + } + if buf[cursor+2] != 'l' { + return errors.ErrInvalidCharacter(buf[cursor+2], "false", cursor) + } + if buf[cursor+3] != 's' { + return errors.ErrInvalidCharacter(buf[cursor+3], "false", cursor) + } + if buf[cursor+4] != 'e' { + return errors.ErrInvalidCharacter(buf[cursor+4], "false", cursor) + } + return nil +} + +func validateNull(buf []byte, cursor int64) error { + if cursor+3 >= int64(len(buf)) { + return errors.ErrUnexpectedEndOfJSON("null", cursor) + } + if buf[cursor+1] != 'u' { + return errors.ErrInvalidCharacter(buf[cursor+1], "null", cursor) + } + if buf[cursor+2] != 'l' { + return errors.ErrInvalidCharacter(buf[cursor+2], "null", cursor) + } + if buf[cursor+3] != 'l' { + return errors.ErrInvalidCharacter(buf[cursor+3], "null", cursor) + } + return nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/float.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/float.go new file mode 100644 index 00000000..dfb7168d --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/float.go @@ -0,0 +1,158 @@ +package decoder + +import ( + "strconv" + "unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +type floatDecoder struct { + op func(unsafe.Pointer, float64) + structName string + fieldName string +} + +func newFloatDecoder(structName, fieldName string, op func(unsafe.Pointer, float64)) *floatDecoder { + return &floatDecoder{op: op, structName: structName, fieldName: fieldName} +} + +var ( + floatTable = [256]bool{ + '0': true, + '1': true, + '2': true, + '3': true, + '4': true, + '5': true, + '6': true, + '7': true, + '8': true, + '9': true, + '.': true, + 'e': true, + 'E': true, + '+': true, + '-': true, + } + + validEndNumberChar = [256]bool{ + nul: true, + ' ': true, + '\t': true, + '\r': true, + '\n': true, + ',': true, + ':': true, + '}': true, + ']': true, + } +) + +func floatBytes(s *Stream) []byte { + start := s.cursor + for { + s.cursor++ + if floatTable[s.char()] { + continue + } else if s.char() == nul { + if s.read() { + s.cursor-- // for retry current character + continue + } + } + break + } + return s.buf[start:s.cursor] +} + +func (d *floatDecoder) decodeStreamByte(s *Stream) ([]byte, error) { + for { + switch s.char() { + case ' ', '\n', '\t', '\r': + s.cursor++ + continue + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return floatBytes(s), nil + case 'n': + if err := nullBytes(s); err != nil { + return nil, err + } + return nil, nil + case nul: + if s.read() { + continue + } + goto ERROR + default: + goto ERROR + } + } +ERROR: + return nil, 
errors.ErrUnexpectedEndOfJSON("float", s.totalOffset()) +} + +func (d *floatDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) { + for { + switch buf[cursor] { + case ' ', '\n', '\t', '\r': + cursor++ + continue + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + start := cursor + cursor++ + for floatTable[buf[cursor]] { + cursor++ + } + num := buf[start:cursor] + return num, cursor, nil + case 'n': + if err := validateNull(buf, cursor); err != nil { + return nil, 0, err + } + cursor += 4 + return nil, cursor, nil + default: + return nil, 0, errors.ErrUnexpectedEndOfJSON("float", cursor) + } + } +} + +func (d *floatDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + bytes, err := d.decodeStreamByte(s) + if err != nil { + return err + } + if bytes == nil { + return nil + } + str := *(*string)(unsafe.Pointer(&bytes)) + f64, err := strconv.ParseFloat(str, 64) + if err != nil { + return errors.ErrSyntax(err.Error(), s.totalOffset()) + } + d.op(p, f64) + return nil +} + +func (d *floatDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + bytes, c, err := d.decodeByte(buf, cursor) + if err != nil { + return 0, err + } + if bytes == nil { + return c, nil + } + cursor = c + if !validEndNumberChar[buf[cursor]] { + return 0, errors.ErrUnexpectedEndOfJSON("float", cursor) + } + s := *(*string)(unsafe.Pointer(&bytes)) + f64, err := strconv.ParseFloat(s, 64) + if err != nil { + return 0, errors.ErrSyntax(err.Error(), cursor) + } + d.op(p, f64) + return cursor, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/func.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/func.go new file mode 100644 index 00000000..ee356371 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/func.go @@ -0,0 +1,141 @@ +package decoder + +import ( + "bytes" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type funcDecoder struct { + typ *runtime.Type + structName string + fieldName string +} + +func newFuncDecoder(typ *runtime.Type, structName, fieldName string) *funcDecoder { + fnDecoder := &funcDecoder{typ, structName, fieldName} + return fnDecoder +} + +func (d *funcDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + s.skipWhiteSpace() + start := s.cursor + if err := s.skipValue(depth); err != nil { + return err + } + src := s.buf[start:s.cursor] + if len(src) > 0 { + switch src[0] { + case '"': + return &errors.UnmarshalTypeError{ + Value: "string", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + case '[': + return &errors.UnmarshalTypeError{ + Value: "array", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + case '{': + return &errors.UnmarshalTypeError{ + Value: "object", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return &errors.UnmarshalTypeError{ + Value: "number", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + case 'n': + if err := nullBytes(s); err != nil { + return err + } + *(*unsafe.Pointer)(p) = nil + return nil + case 't': + if err := trueBytes(s); err == nil { + return &errors.UnmarshalTypeError{ + Value: "boolean", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + } + case 'f': + if err := falseBytes(s); err == nil { + return 
&errors.UnmarshalTypeError{ + Value: "boolean", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + } + } + } + return errors.ErrInvalidBeginningOfValue(s.buf[s.cursor], s.totalOffset()) +} + +func (d *funcDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + cursor = skipWhiteSpace(buf, cursor) + start := cursor + end, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + src := buf[start:end] + if len(src) > 0 { + switch src[0] { + case '"': + return 0, &errors.UnmarshalTypeError{ + Value: "string", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + case '[': + return 0, &errors.UnmarshalTypeError{ + Value: "array", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + case '{': + return 0, &errors.UnmarshalTypeError{ + Value: "object", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return 0, &errors.UnmarshalTypeError{ + Value: "number", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + case 'n': + if bytes.Equal(src, nullbytes) { + *(*unsafe.Pointer)(p) = nil + return end, nil + } + case 't': + if err := validateTrue(buf, start); err == nil { + return 0, &errors.UnmarshalTypeError{ + Value: "boolean", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + } + case 'f': + if err := validateFalse(buf, start); err == nil { + return 0, &errors.UnmarshalTypeError{ + Value: "boolean", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + } + } + } + return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/int.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/int.go new file mode 100644 index 00000000..509b753d --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/int.go @@ -0,0 +1,242 @@ +package decoder + +import ( + "fmt" + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type intDecoder struct { + typ *runtime.Type + kind reflect.Kind + op func(unsafe.Pointer, int64) + structName string + fieldName string +} + +func newIntDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, int64)) *intDecoder { + return &intDecoder{ + typ: typ, + kind: typ.Kind(), + op: op, + structName: structName, + fieldName: fieldName, + } +} + +func (d *intDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError { + return &errors.UnmarshalTypeError{ + Value: fmt.Sprintf("number %s", string(buf)), + Type: runtime.RType2Type(d.typ), + Struct: d.structName, + Field: d.fieldName, + Offset: offset, + } +} + +var ( + pow10i64 = [...]int64{ + 1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09, + 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, + } + pow10i64Len = len(pow10i64) +) + +func (d *intDecoder) parseInt(b []byte) (int64, error) { + isNegative := false + if b[0] == '-' { + b = b[1:] + isNegative = true + } + maxDigit := len(b) + if maxDigit > pow10i64Len { + return 0, fmt.Errorf("invalid length of number") + } + sum := int64(0) + for i := 0; i < maxDigit; i++ { + c := int64(b[i]) - 48 + digitValue := pow10i64[maxDigit-i-1] + sum += c * digitValue + } + if isNegative { + return -1 * sum, nil + } + return sum, nil +} + +var ( + numTable = [256]bool{ + '0': true, + '1': true, + '2': true, + '3': true, + '4': true, + '5': true, 
+ '6': true, + '7': true, + '8': true, + '9': true, + } +) + +var ( + numZeroBuf = []byte{'0'} +) + +func (d *intDecoder) decodeStreamByte(s *Stream) ([]byte, error) { + for { + switch s.char() { + case ' ', '\n', '\t', '\r': + s.cursor++ + continue + case '-': + start := s.cursor + for { + s.cursor++ + if numTable[s.char()] { + continue + } else if s.char() == nul { + if s.read() { + s.cursor-- // for retry current character + continue + } + } + break + } + num := s.buf[start:s.cursor] + if len(num) < 2 { + goto ERROR + } + return num, nil + case '0': + s.cursor++ + return numZeroBuf, nil + case '1', '2', '3', '4', '5', '6', '7', '8', '9': + start := s.cursor + for { + s.cursor++ + if numTable[s.char()] { + continue + } else if s.char() == nul { + if s.read() { + s.cursor-- // for retry current character + continue + } + } + break + } + num := s.buf[start:s.cursor] + return num, nil + case 'n': + if err := nullBytes(s); err != nil { + return nil, err + } + return nil, nil + case nul: + if s.read() { + continue + } + goto ERROR + default: + return nil, d.typeError([]byte{s.char()}, s.totalOffset()) + } + } +ERROR: + return nil, errors.ErrUnexpectedEndOfJSON("number(integer)", s.totalOffset()) +} + +func (d *intDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) { + b := (*sliceHeader)(unsafe.Pointer(&buf)).data + for { + switch char(b, cursor) { + case ' ', '\n', '\t', '\r': + cursor++ + continue + case '0': + cursor++ + return numZeroBuf, cursor, nil + case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9': + start := cursor + cursor++ + for numTable[char(b, cursor)] { + cursor++ + } + num := buf[start:cursor] + return num, cursor, nil + case 'n': + if err := validateNull(buf, cursor); err != nil { + return nil, 0, err + } + cursor += 4 + return nil, cursor, nil + default: + return nil, 0, d.typeError([]byte{char(b, cursor)}, cursor) + } + } +} + +func (d *intDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + bytes, err := d.decodeStreamByte(s) + if err != nil { + return err + } + if bytes == nil { + return nil + } + i64, err := d.parseInt(bytes) + if err != nil { + return d.typeError(bytes, s.totalOffset()) + } + switch d.kind { + case reflect.Int8: + if i64 < -1*(1<<7) || (1<<7) <= i64 { + return d.typeError(bytes, s.totalOffset()) + } + case reflect.Int16: + if i64 < -1*(1<<15) || (1<<15) <= i64 { + return d.typeError(bytes, s.totalOffset()) + } + case reflect.Int32: + if i64 < -1*(1<<31) || (1<<31) <= i64 { + return d.typeError(bytes, s.totalOffset()) + } + } + d.op(p, i64) + s.reset() + return nil +} + +func (d *intDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + bytes, c, err := d.decodeByte(ctx.Buf, cursor) + if err != nil { + return 0, err + } + if bytes == nil { + return c, nil + } + cursor = c + + i64, err := d.parseInt(bytes) + if err != nil { + return 0, d.typeError(bytes, cursor) + } + switch d.kind { + case reflect.Int8: + if i64 < -1*(1<<7) || (1<<7) <= i64 { + return 0, d.typeError(bytes, cursor) + } + case reflect.Int16: + if i64 < -1*(1<<15) || (1<<15) <= i64 { + return 0, d.typeError(bytes, cursor) + } + case reflect.Int32: + if i64 < -1*(1<<31) || (1<<31) <= i64 { + return 0, d.typeError(bytes, cursor) + } + } + d.op(p, i64) + return cursor, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/interface.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/interface.go new file mode 100644 index 00000000..4dbb4be4 --- /dev/null 
+++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/interface.go @@ -0,0 +1,458 @@ +package decoder + +import ( + "bytes" + "encoding" + "encoding/json" + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type interfaceDecoder struct { + typ *runtime.Type + structName string + fieldName string + sliceDecoder *sliceDecoder + mapDecoder *mapDecoder + floatDecoder *floatDecoder + numberDecoder *numberDecoder + stringDecoder *stringDecoder +} + +func newEmptyInterfaceDecoder(structName, fieldName string) *interfaceDecoder { + ifaceDecoder := &interfaceDecoder{ + typ: emptyInterfaceType, + structName: structName, + fieldName: fieldName, + floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) { + *(*interface{})(p) = v + }), + numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) { + *(*interface{})(p) = v + }), + stringDecoder: newStringDecoder(structName, fieldName), + } + ifaceDecoder.sliceDecoder = newSliceDecoder( + ifaceDecoder, + emptyInterfaceType, + emptyInterfaceType.Size(), + structName, fieldName, + ) + ifaceDecoder.mapDecoder = newMapDecoder( + interfaceMapType, + stringType, + ifaceDecoder.stringDecoder, + interfaceMapType.Elem(), + ifaceDecoder, + structName, + fieldName, + ) + return ifaceDecoder +} + +func newInterfaceDecoder(typ *runtime.Type, structName, fieldName string) *interfaceDecoder { + emptyIfaceDecoder := newEmptyInterfaceDecoder(structName, fieldName) + stringDecoder := newStringDecoder(structName, fieldName) + return &interfaceDecoder{ + typ: typ, + structName: structName, + fieldName: fieldName, + sliceDecoder: newSliceDecoder( + emptyIfaceDecoder, + emptyInterfaceType, + emptyInterfaceType.Size(), + structName, fieldName, + ), + mapDecoder: newMapDecoder( + interfaceMapType, + stringType, + stringDecoder, + interfaceMapType.Elem(), + emptyIfaceDecoder, + structName, + fieldName, + ), + floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) { + *(*interface{})(p) = v + }), + numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) { + *(*interface{})(p) = v + }), + stringDecoder: stringDecoder, + } +} + +func (d *interfaceDecoder) numDecoder(s *Stream) Decoder { + if s.UseNumber { + return d.numberDecoder + } + return d.floatDecoder +} + +var ( + emptyInterfaceType = runtime.Type2RType(reflect.TypeOf((*interface{})(nil)).Elem()) + interfaceMapType = runtime.Type2RType( + reflect.TypeOf((*map[string]interface{})(nil)).Elem(), + ) + stringType = runtime.Type2RType( + reflect.TypeOf(""), + ) +) + +func decodeStreamUnmarshaler(s *Stream, depth int64, unmarshaler json.Unmarshaler) error { + start := s.cursor + if err := s.skipValue(depth); err != nil { + return err + } + src := s.buf[start:s.cursor] + dst := make([]byte, len(src)) + copy(dst, src) + + if err := unmarshaler.UnmarshalJSON(dst); err != nil { + return err + } + return nil +} + +func decodeStreamUnmarshalerContext(s *Stream, depth int64, unmarshaler unmarshalerContext) error { + start := s.cursor + if err := s.skipValue(depth); err != nil { + return err + } + src := s.buf[start:s.cursor] + dst := make([]byte, len(src)) + copy(dst, src) + + if err := unmarshaler.UnmarshalJSON(s.Option.Context, dst); err != nil { + return err + } + return nil +} + +func decodeUnmarshaler(buf []byte, cursor, depth int64, unmarshaler json.Unmarshaler) (int64, error) { + cursor = 
skipWhiteSpace(buf, cursor) + start := cursor + end, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + src := buf[start:end] + dst := make([]byte, len(src)) + copy(dst, src) + + if err := unmarshaler.UnmarshalJSON(dst); err != nil { + return 0, err + } + return end, nil +} + +func decodeUnmarshalerContext(ctx *RuntimeContext, buf []byte, cursor, depth int64, unmarshaler unmarshalerContext) (int64, error) { + cursor = skipWhiteSpace(buf, cursor) + start := cursor + end, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + src := buf[start:end] + dst := make([]byte, len(src)) + copy(dst, src) + + if err := unmarshaler.UnmarshalJSON(ctx.Option.Context, dst); err != nil { + return 0, err + } + return end, nil +} + +func decodeStreamTextUnmarshaler(s *Stream, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) error { + start := s.cursor + if err := s.skipValue(depth); err != nil { + return err + } + src := s.buf[start:s.cursor] + if bytes.Equal(src, nullbytes) { + *(*unsafe.Pointer)(p) = nil + return nil + } + + dst := make([]byte, len(src)) + copy(dst, src) + + if err := unmarshaler.UnmarshalText(dst); err != nil { + return err + } + return nil +} + +func decodeTextUnmarshaler(buf []byte, cursor, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) (int64, error) { + cursor = skipWhiteSpace(buf, cursor) + start := cursor + end, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + src := buf[start:end] + if bytes.Equal(src, nullbytes) { + *(*unsafe.Pointer)(p) = nil + return end, nil + } + if s, ok := unquoteBytes(src); ok { + src = s + } + if err := unmarshaler.UnmarshalText(src); err != nil { + return 0, err + } + return end, nil +} + +func (d *interfaceDecoder) decodeStreamEmptyInterface(s *Stream, depth int64, p unsafe.Pointer) error { + c := s.skipWhiteSpace() + for { + switch c { + case '{': + var v map[string]interface{} + ptr := unsafe.Pointer(&v) + if err := d.mapDecoder.DecodeStream(s, depth, ptr); err != nil { + return err + } + *(*interface{})(p) = v + return nil + case '[': + var v []interface{} + ptr := unsafe.Pointer(&v) + if err := d.sliceDecoder.DecodeStream(s, depth, ptr); err != nil { + return err + } + *(*interface{})(p) = v + return nil + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return d.numDecoder(s).DecodeStream(s, depth, p) + case '"': + s.cursor++ + start := s.cursor + for { + switch s.char() { + case '\\': + if _, err := decodeEscapeString(s, nil); err != nil { + return err + } + case '"': + literal := s.buf[start:s.cursor] + s.cursor++ + *(*interface{})(p) = string(literal) + return nil + case nul: + if s.read() { + continue + } + return errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + } + s.cursor++ + } + case 't': + if err := trueBytes(s); err != nil { + return err + } + **(**interface{})(unsafe.Pointer(&p)) = true + return nil + case 'f': + if err := falseBytes(s); err != nil { + return err + } + **(**interface{})(unsafe.Pointer(&p)) = false + return nil + case 'n': + if err := nullBytes(s); err != nil { + return err + } + *(*interface{})(p) = nil + return nil + case nul: + if s.read() { + c = s.char() + continue + } + } + break + } + return errors.ErrInvalidBeginningOfValue(c, s.totalOffset()) +} + +type emptyInterface struct { + typ *runtime.Type + ptr unsafe.Pointer +} + +func (d *interfaceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + runtimeInterfaceValue := 
*(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: d.typ, + ptr: p, + })) + rv := reflect.ValueOf(runtimeInterfaceValue) + if rv.NumMethod() > 0 && rv.CanInterface() { + if u, ok := rv.Interface().(unmarshalerContext); ok { + return decodeStreamUnmarshalerContext(s, depth, u) + } + if u, ok := rv.Interface().(json.Unmarshaler); ok { + return decodeStreamUnmarshaler(s, depth, u) + } + if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok { + return decodeStreamTextUnmarshaler(s, depth, u, p) + } + if s.skipWhiteSpace() == 'n' { + if err := nullBytes(s); err != nil { + return err + } + *(*interface{})(p) = nil + return nil + } + return d.errUnmarshalType(rv.Type(), s.totalOffset()) + } + iface := rv.Interface() + ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface)) + typ := ifaceHeader.typ + if ifaceHeader.ptr == nil || d.typ == typ || typ == nil { + // concrete type is empty interface + return d.decodeStreamEmptyInterface(s, depth, p) + } + if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr { + return d.decodeStreamEmptyInterface(s, depth, p) + } + if s.skipWhiteSpace() == 'n' { + if err := nullBytes(s); err != nil { + return err + } + *(*interface{})(p) = nil + return nil + } + decoder, err := CompileToGetDecoder(typ) + if err != nil { + return err + } + return decoder.DecodeStream(s, depth, ifaceHeader.ptr) +} + +func (d *interfaceDecoder) errUnmarshalType(typ reflect.Type, offset int64) *errors.UnmarshalTypeError { + return &errors.UnmarshalTypeError{ + Value: typ.String(), + Type: typ, + Offset: offset, + Struct: d.structName, + Field: d.fieldName, + } +} + +func (d *interfaceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: d.typ, + ptr: p, + })) + rv := reflect.ValueOf(runtimeInterfaceValue) + if rv.NumMethod() > 0 && rv.CanInterface() { + if u, ok := rv.Interface().(unmarshalerContext); ok { + return decodeUnmarshalerContext(ctx, buf, cursor, depth, u) + } + if u, ok := rv.Interface().(json.Unmarshaler); ok { + return decodeUnmarshaler(buf, cursor, depth, u) + } + if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok { + return decodeTextUnmarshaler(buf, cursor, depth, u, p) + } + cursor = skipWhiteSpace(buf, cursor) + if buf[cursor] == 'n' { + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + **(**interface{})(unsafe.Pointer(&p)) = nil + return cursor, nil + } + return 0, d.errUnmarshalType(rv.Type(), cursor) + } + + iface := rv.Interface() + ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface)) + typ := ifaceHeader.typ + if ifaceHeader.ptr == nil || d.typ == typ || typ == nil { + // concrete type is empty interface + return d.decodeEmptyInterface(ctx, cursor, depth, p) + } + if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr { + return d.decodeEmptyInterface(ctx, cursor, depth, p) + } + cursor = skipWhiteSpace(buf, cursor) + if buf[cursor] == 'n' { + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + **(**interface{})(unsafe.Pointer(&p)) = nil + return cursor, nil + } + decoder, err := CompileToGetDecoder(typ) + if err != nil { + return 0, err + } + return decoder.Decode(ctx, cursor, depth, ifaceHeader.ptr) +} + +func (d *interfaceDecoder) decodeEmptyInterface(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + cursor = skipWhiteSpace(buf, cursor) + 
switch buf[cursor] { + case '{': + var v map[string]interface{} + ptr := unsafe.Pointer(&v) + cursor, err := d.mapDecoder.Decode(ctx, cursor, depth, ptr) + if err != nil { + return 0, err + } + **(**interface{})(unsafe.Pointer(&p)) = v + return cursor, nil + case '[': + var v []interface{} + ptr := unsafe.Pointer(&v) + cursor, err := d.sliceDecoder.Decode(ctx, cursor, depth, ptr) + if err != nil { + return 0, err + } + **(**interface{})(unsafe.Pointer(&p)) = v + return cursor, nil + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return d.floatDecoder.Decode(ctx, cursor, depth, p) + case '"': + var v string + ptr := unsafe.Pointer(&v) + cursor, err := d.stringDecoder.Decode(ctx, cursor, depth, ptr) + if err != nil { + return 0, err + } + **(**interface{})(unsafe.Pointer(&p)) = v + return cursor, nil + case 't': + if err := validateTrue(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + **(**interface{})(unsafe.Pointer(&p)) = true + return cursor, nil + case 'f': + if err := validateFalse(buf, cursor); err != nil { + return 0, err + } + cursor += 5 + **(**interface{})(unsafe.Pointer(&p)) = false + return cursor, nil + case 'n': + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + **(**interface{})(unsafe.Pointer(&p)) = nil + return cursor, nil + } + return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/invalid.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/invalid.go new file mode 100644 index 00000000..1ef50a7d --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/invalid.go @@ -0,0 +1,45 @@ +package decoder + +import ( + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type invalidDecoder struct { + typ *runtime.Type + kind reflect.Kind + structName string + fieldName string +} + +func newInvalidDecoder(typ *runtime.Type, structName, fieldName string) *invalidDecoder { + return &invalidDecoder{ + typ: typ, + kind: typ.Kind(), + structName: structName, + fieldName: fieldName, + } +} + +func (d *invalidDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + return &errors.UnmarshalTypeError{ + Value: "object", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + Struct: d.structName, + Field: d.fieldName, + } +} + +func (d *invalidDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + return 0, &errors.UnmarshalTypeError{ + Value: "object", + Type: runtime.RType2Type(d.typ), + Offset: cursor, + Struct: d.structName, + Field: d.fieldName, + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/map.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/map.go new file mode 100644 index 00000000..cb55ef00 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/map.go @@ -0,0 +1,187 @@ +package decoder + +import ( + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type mapDecoder struct { + mapType *runtime.Type + keyType *runtime.Type + valueType *runtime.Type + canUseAssignFaststrType bool + keyDecoder Decoder + valueDecoder Decoder + structName string + fieldName string +} + +func newMapDecoder(mapType *runtime.Type, keyType *runtime.Type, 
keyDec Decoder, valueType *runtime.Type, valueDec Decoder, structName, fieldName string) *mapDecoder { + return &mapDecoder{ + mapType: mapType, + keyDecoder: keyDec, + keyType: keyType, + canUseAssignFaststrType: canUseAssignFaststrType(keyType, valueType), + valueType: valueType, + valueDecoder: valueDec, + structName: structName, + fieldName: fieldName, + } +} + +const ( + mapMaxElemSize = 128 +) + +// See detail: https://github.com/goccy/go-json/pull/283 +func canUseAssignFaststrType(key *runtime.Type, value *runtime.Type) bool { + indirectElem := value.Size() > mapMaxElemSize + if indirectElem { + return false + } + return key.Kind() == reflect.String +} + +//go:linkname makemap reflect.makemap +func makemap(*runtime.Type, int) unsafe.Pointer + +//nolint:golint +//go:linkname mapassign_faststr runtime.mapassign_faststr +//go:noescape +func mapassign_faststr(t *runtime.Type, m unsafe.Pointer, s string) unsafe.Pointer + +//go:linkname mapassign reflect.mapassign +//go:noescape +func mapassign(t *runtime.Type, m unsafe.Pointer, k, v unsafe.Pointer) + +func (d *mapDecoder) mapassign(t *runtime.Type, m, k, v unsafe.Pointer) { + if d.canUseAssignFaststrType { + mapV := mapassign_faststr(t, m, *(*string)(k)) + typedmemmove(d.valueType, mapV, v) + } else { + mapassign(t, m, k, v) + } +} + +func (d *mapDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + + switch s.skipWhiteSpace() { + case 'n': + if err := nullBytes(s); err != nil { + return err + } + **(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil + return nil + case '{': + default: + return errors.ErrExpected("{ character for map value", s.totalOffset()) + } + mapValue := *(*unsafe.Pointer)(p) + if mapValue == nil { + mapValue = makemap(d.mapType, 0) + } + s.cursor++ + if s.equalChar('}') { + *(*unsafe.Pointer)(p) = mapValue + s.cursor++ + return nil + } + for { + k := unsafe_New(d.keyType) + if err := d.keyDecoder.DecodeStream(s, depth, k); err != nil { + return err + } + s.skipWhiteSpace() + if !s.equalChar(':') { + return errors.ErrExpected("colon after object key", s.totalOffset()) + } + s.cursor++ + v := unsafe_New(d.valueType) + if err := d.valueDecoder.DecodeStream(s, depth, v); err != nil { + return err + } + d.mapassign(d.mapType, mapValue, k, v) + s.skipWhiteSpace() + if s.equalChar('}') { + **(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue + s.cursor++ + return nil + } + if !s.equalChar(',') { + return errors.ErrExpected("comma after object value", s.totalOffset()) + } + s.cursor++ + } +} + +func (d *mapDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + + cursor = skipWhiteSpace(buf, cursor) + buflen := int64(len(buf)) + if buflen < 2 { + return 0, errors.ErrExpected("{} for map", cursor) + } + switch buf[cursor] { + case 'n': + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + **(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil + return cursor, nil + case '{': + default: + return 0, errors.ErrExpected("{ character for map value", cursor) + } + cursor++ + cursor = skipWhiteSpace(buf, cursor) + mapValue := *(*unsafe.Pointer)(p) + if mapValue == nil { + mapValue = makemap(d.mapType, 0) + } + if buf[cursor] == '}' { + **(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue + cursor++ + return cursor, 
nil + } + for { + k := unsafe_New(d.keyType) + keyCursor, err := d.keyDecoder.Decode(ctx, cursor, depth, k) + if err != nil { + return 0, err + } + cursor = skipWhiteSpace(buf, keyCursor) + if buf[cursor] != ':' { + return 0, errors.ErrExpected("colon after object key", cursor) + } + cursor++ + v := unsafe_New(d.valueType) + valueCursor, err := d.valueDecoder.Decode(ctx, cursor, depth, v) + if err != nil { + return 0, err + } + d.mapassign(d.mapType, mapValue, k, v) + cursor = skipWhiteSpace(buf, valueCursor) + if buf[cursor] == '}' { + **(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue + cursor++ + return cursor, nil + } + if buf[cursor] != ',' { + return 0, errors.ErrExpected("comma after object value", cursor) + } + cursor++ + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/number.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/number.go new file mode 100644 index 00000000..bf63773e --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/number.go @@ -0,0 +1,112 @@ +package decoder + +import ( + "encoding/json" + "strconv" + "unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +type numberDecoder struct { + stringDecoder *stringDecoder + op func(unsafe.Pointer, json.Number) + structName string + fieldName string +} + +func newNumberDecoder(structName, fieldName string, op func(unsafe.Pointer, json.Number)) *numberDecoder { + return &numberDecoder{ + stringDecoder: newStringDecoder(structName, fieldName), + op: op, + structName: structName, + fieldName: fieldName, + } +} + +func (d *numberDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + bytes, err := d.decodeStreamByte(s) + if err != nil { + return err + } + if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil { + return errors.ErrSyntax(err.Error(), s.totalOffset()) + } + d.op(p, json.Number(string(bytes))) + s.reset() + return nil +} + +func (d *numberDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + bytes, c, err := d.decodeByte(ctx.Buf, cursor) + if err != nil { + return 0, err + } + if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil { + return 0, errors.ErrSyntax(err.Error(), c) + } + cursor = c + s := *(*string)(unsafe.Pointer(&bytes)) + d.op(p, json.Number(s)) + return cursor, nil +} + +func (d *numberDecoder) decodeStreamByte(s *Stream) ([]byte, error) { + start := s.cursor + for { + switch s.char() { + case ' ', '\n', '\t', '\r': + s.cursor++ + continue + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return floatBytes(s), nil + case 'n': + if err := nullBytes(s); err != nil { + return nil, err + } + return nil, nil + case '"': + return d.stringDecoder.decodeStreamByte(s) + case nul: + if s.read() { + continue + } + goto ERROR + default: + goto ERROR + } + } +ERROR: + if s.cursor == start { + return nil, errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset()) + } + return nil, errors.ErrUnexpectedEndOfJSON("json.Number", s.totalOffset()) +} + +func (d *numberDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) { + for { + switch buf[cursor] { + case ' ', '\n', '\t', '\r': + cursor++ + continue + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + start := cursor + cursor++ + for floatTable[buf[cursor]] { + cursor++ + } + num := buf[start:cursor] + return num, cursor, nil + case 'n': + if err := validateNull(buf, cursor); err != nil { + 
return nil, 0, err + } + cursor += 4 + return nil, cursor, nil + case '"': + return d.stringDecoder.decodeByte(buf, cursor) + default: + return nil, 0, errors.ErrUnexpectedEndOfJSON("json.Number", cursor) + } + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/option.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/option.go new file mode 100644 index 00000000..e41f876b --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/option.go @@ -0,0 +1,15 @@ +package decoder + +import "context" + +type OptionFlags uint8 + +const ( + FirstWinOption OptionFlags = 1 << iota + ContextOption +) + +type Option struct { + Flags OptionFlags + Context context.Context +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/ptr.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/ptr.go new file mode 100644 index 00000000..2c83b9c4 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/ptr.go @@ -0,0 +1,87 @@ +package decoder + +import ( + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +type ptrDecoder struct { + dec Decoder + typ *runtime.Type + structName string + fieldName string +} + +func newPtrDecoder(dec Decoder, typ *runtime.Type, structName, fieldName string) *ptrDecoder { + return &ptrDecoder{ + dec: dec, + typ: typ, + structName: structName, + fieldName: fieldName, + } +} + +func (d *ptrDecoder) contentDecoder() Decoder { + dec, ok := d.dec.(*ptrDecoder) + if !ok { + return d.dec + } + return dec.contentDecoder() +} + +//nolint:golint +//go:linkname unsafe_New reflect.unsafe_New +func unsafe_New(*runtime.Type) unsafe.Pointer + +func (d *ptrDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + if s.skipWhiteSpace() == nul { + s.read() + } + if s.char() == 'n' { + if err := nullBytes(s); err != nil { + return err + } + *(*unsafe.Pointer)(p) = nil + return nil + } + var newptr unsafe.Pointer + if *(*unsafe.Pointer)(p) == nil { + newptr = unsafe_New(d.typ) + *(*unsafe.Pointer)(p) = newptr + } else { + newptr = *(*unsafe.Pointer)(p) + } + if err := d.dec.DecodeStream(s, depth, newptr); err != nil { + return err + } + return nil +} + +func (d *ptrDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + cursor = skipWhiteSpace(buf, cursor) + if buf[cursor] == 'n' { + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + if p != nil { + *(*unsafe.Pointer)(p) = nil + } + cursor += 4 + return cursor, nil + } + var newptr unsafe.Pointer + if *(*unsafe.Pointer)(p) == nil { + newptr = unsafe_New(d.typ) + *(*unsafe.Pointer)(p) = newptr + } else { + newptr = *(*unsafe.Pointer)(p) + } + c, err := d.dec.Decode(ctx, cursor, depth, newptr) + if err != nil { + return 0, err + } + cursor = c + return cursor, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/slice.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/slice.go new file mode 100644 index 00000000..85b6e111 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/slice.go @@ -0,0 +1,301 @@ +package decoder + +import ( + "reflect" + "sync" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +var ( + sliceType = runtime.Type2RType( + reflect.TypeOf((*sliceHeader)(nil)).Elem(), + ) + nilSlice = 
unsafe.Pointer(&sliceHeader{}) +) + +type sliceDecoder struct { + elemType *runtime.Type + isElemPointerType bool + valueDecoder Decoder + size uintptr + arrayPool sync.Pool + structName string + fieldName string +} + +// If use reflect.SliceHeader, data type is uintptr. +// In this case, Go compiler cannot trace reference created by newArray(). +// So, define using unsafe.Pointer as data type +type sliceHeader struct { + data unsafe.Pointer + len int + cap int +} + +const ( + defaultSliceCapacity = 2 +) + +func newSliceDecoder(dec Decoder, elemType *runtime.Type, size uintptr, structName, fieldName string) *sliceDecoder { + return &sliceDecoder{ + valueDecoder: dec, + elemType: elemType, + isElemPointerType: elemType.Kind() == reflect.Ptr || elemType.Kind() == reflect.Map, + size: size, + arrayPool: sync.Pool{ + New: func() interface{} { + return &sliceHeader{ + data: newArray(elemType, defaultSliceCapacity), + len: 0, + cap: defaultSliceCapacity, + } + }, + }, + structName: structName, + fieldName: fieldName, + } +} + +func (d *sliceDecoder) newSlice(src *sliceHeader) *sliceHeader { + slice := d.arrayPool.Get().(*sliceHeader) + if src.len > 0 { + // copy original elem + if slice.cap < src.cap { + data := newArray(d.elemType, src.cap) + slice = &sliceHeader{data: data, len: src.len, cap: src.cap} + } else { + slice.len = src.len + } + copySlice(d.elemType, *slice, *src) + } else { + slice.len = 0 + } + return slice +} + +func (d *sliceDecoder) releaseSlice(p *sliceHeader) { + d.arrayPool.Put(p) +} + +//go:linkname copySlice reflect.typedslicecopy +func copySlice(elemType *runtime.Type, dst, src sliceHeader) int + +//go:linkname newArray reflect.unsafe_NewArray +func newArray(*runtime.Type, int) unsafe.Pointer + +//go:linkname typedmemmove reflect.typedmemmove +func typedmemmove(t *runtime.Type, dst, src unsafe.Pointer) + +func (d *sliceDecoder) errNumber(offset int64) *errors.UnmarshalTypeError { + return &errors.UnmarshalTypeError{ + Value: "number", + Type: reflect.SliceOf(runtime.RType2Type(d.elemType)), + Struct: d.structName, + Field: d.fieldName, + Offset: offset, + } +} + +func (d *sliceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + + for { + switch s.char() { + case ' ', '\n', '\t', '\r': + s.cursor++ + continue + case 'n': + if err := nullBytes(s); err != nil { + return err + } + typedmemmove(sliceType, p, nilSlice) + return nil + case '[': + s.cursor++ + if s.skipWhiteSpace() == ']' { + dst := (*sliceHeader)(p) + if dst.data == nil { + dst.data = newArray(d.elemType, 0) + } else { + dst.len = 0 + } + s.cursor++ + return nil + } + idx := 0 + slice := d.newSlice((*sliceHeader)(p)) + srcLen := slice.len + capacity := slice.cap + data := slice.data + for { + if capacity <= idx { + src := sliceHeader{data: data, len: idx, cap: capacity} + capacity *= 2 + data = newArray(d.elemType, capacity) + dst := sliceHeader{data: data, len: idx, cap: capacity} + copySlice(d.elemType, dst, src) + } + ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size) + + // if srcLen is greater than idx, keep the original reference + if srcLen <= idx { + if d.isElemPointerType { + **(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer + } else { + // assign new element to the slice + typedmemmove(d.elemType, ep, unsafe_New(d.elemType)) + } + } + + if err := d.valueDecoder.DecodeStream(s, depth, ep); err != nil { + return err + } + s.skipWhiteSpace() + 
RETRY: + switch s.char() { + case ']': + slice.cap = capacity + slice.len = idx + 1 + slice.data = data + dst := (*sliceHeader)(p) + dst.len = idx + 1 + if dst.len > dst.cap { + dst.data = newArray(d.elemType, dst.len) + dst.cap = dst.len + } + copySlice(d.elemType, *dst, *slice) + d.releaseSlice(slice) + s.cursor++ + return nil + case ',': + idx++ + case nul: + if s.read() { + goto RETRY + } + slice.cap = capacity + slice.data = data + d.releaseSlice(slice) + goto ERROR + default: + slice.cap = capacity + slice.data = data + d.releaseSlice(slice) + goto ERROR + } + s.cursor++ + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return d.errNumber(s.totalOffset()) + case nul: + if s.read() { + continue + } + goto ERROR + default: + goto ERROR + } + } +ERROR: + return errors.ErrUnexpectedEndOfJSON("slice", s.totalOffset()) +} + +func (d *sliceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + + for { + switch buf[cursor] { + case ' ', '\n', '\t', '\r': + cursor++ + continue + case 'n': + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + typedmemmove(sliceType, p, nilSlice) + return cursor, nil + case '[': + cursor++ + cursor = skipWhiteSpace(buf, cursor) + if buf[cursor] == ']' { + dst := (*sliceHeader)(p) + if dst.data == nil { + dst.data = newArray(d.elemType, 0) + } else { + dst.len = 0 + } + cursor++ + return cursor, nil + } + idx := 0 + slice := d.newSlice((*sliceHeader)(p)) + srcLen := slice.len + capacity := slice.cap + data := slice.data + for { + if capacity <= idx { + src := sliceHeader{data: data, len: idx, cap: capacity} + capacity *= 2 + data = newArray(d.elemType, capacity) + dst := sliceHeader{data: data, len: idx, cap: capacity} + copySlice(d.elemType, dst, src) + } + ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size) + // if srcLen is greater than idx, keep the original reference + if srcLen <= idx { + if d.isElemPointerType { + **(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer + } else { + // assign new element to the slice + typedmemmove(d.elemType, ep, unsafe_New(d.elemType)) + } + } + c, err := d.valueDecoder.Decode(ctx, cursor, depth, ep) + if err != nil { + return 0, err + } + cursor = c + cursor = skipWhiteSpace(buf, cursor) + switch buf[cursor] { + case ']': + slice.cap = capacity + slice.len = idx + 1 + slice.data = data + dst := (*sliceHeader)(p) + dst.len = idx + 1 + if dst.len > dst.cap { + dst.data = newArray(d.elemType, dst.len) + dst.cap = dst.len + } + copySlice(d.elemType, *dst, *slice) + d.releaseSlice(slice) + cursor++ + return cursor, nil + case ',': + idx++ + default: + slice.cap = capacity + slice.data = data + d.releaseSlice(slice) + return 0, errors.ErrInvalidCharacter(buf[cursor], "slice", cursor) + } + cursor++ + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return 0, d.errNumber(cursor) + default: + return 0, errors.ErrUnexpectedEndOfJSON("slice", cursor) + } + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/stream.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/stream.go new file mode 100644 index 00000000..a383f725 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/stream.go @@ -0,0 +1,556 @@ +package decoder + +import ( + "bytes" + "encoding/json" + "io" + "strconv" + 
"unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +const ( + initBufSize = 512 +) + +type Stream struct { + buf []byte + bufSize int64 + length int64 + r io.Reader + offset int64 + cursor int64 + filledBuffer bool + allRead bool + UseNumber bool + DisallowUnknownFields bool + Option *Option +} + +func NewStream(r io.Reader) *Stream { + return &Stream{ + r: r, + bufSize: initBufSize, + buf: make([]byte, initBufSize), + Option: &Option{}, + } +} + +func (s *Stream) TotalOffset() int64 { + return s.totalOffset() +} + +func (s *Stream) Buffered() io.Reader { + buflen := int64(len(s.buf)) + for i := s.cursor; i < buflen; i++ { + if s.buf[i] == nul { + return bytes.NewReader(s.buf[s.cursor:i]) + } + } + return bytes.NewReader(s.buf[s.cursor:]) +} + +func (s *Stream) PrepareForDecode() error { + for { + switch s.char() { + case ' ', '\t', '\r', '\n': + s.cursor++ + continue + case ',', ':': + s.cursor++ + return nil + case nul: + if s.read() { + continue + } + return io.EOF + } + break + } + return nil +} + +func (s *Stream) totalOffset() int64 { + return s.offset + s.cursor +} + +func (s *Stream) char() byte { + return s.buf[s.cursor] +} + +func (s *Stream) equalChar(c byte) bool { + cur := s.buf[s.cursor] + if cur == nul { + s.read() + cur = s.buf[s.cursor] + } + return cur == c +} + +func (s *Stream) stat() ([]byte, int64, unsafe.Pointer) { + return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data +} + +func (s *Stream) bufptr() unsafe.Pointer { + return (*sliceHeader)(unsafe.Pointer(&s.buf)).data +} + +func (s *Stream) statForRetry() ([]byte, int64, unsafe.Pointer) { + s.cursor-- // for retry ( because caller progress cursor position in each loop ) + return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data +} + +func (s *Stream) Reset() { + s.reset() + s.bufSize = int64(len(s.buf)) +} + +func (s *Stream) More() bool { + for { + switch s.char() { + case ' ', '\n', '\r', '\t': + s.cursor++ + continue + case '}', ']': + return false + case nul: + if s.read() { + continue + } + return false + } + break + } + return true +} + +func (s *Stream) Token() (interface{}, error) { + for { + c := s.char() + switch c { + case ' ', '\n', '\r', '\t': + s.cursor++ + case '{', '[', ']', '}': + s.cursor++ + return json.Delim(c), nil + case ',', ':': + s.cursor++ + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + bytes := floatBytes(s) + str := *(*string)(unsafe.Pointer(&bytes)) + if s.UseNumber { + return json.Number(str), nil + } + f64, err := strconv.ParseFloat(str, 64) + if err != nil { + return nil, err + } + return f64, nil + case '"': + bytes, err := stringBytes(s) + if err != nil { + return nil, err + } + return string(bytes), nil + case 't': + if err := trueBytes(s); err != nil { + return nil, err + } + return true, nil + case 'f': + if err := falseBytes(s); err != nil { + return nil, err + } + return false, nil + case 'n': + if err := nullBytes(s); err != nil { + return nil, err + } + return nil, nil + case nul: + if s.read() { + continue + } + goto END + default: + return nil, errors.ErrInvalidCharacter(s.char(), "token", s.totalOffset()) + } + } +END: + return nil, io.EOF +} + +func (s *Stream) reset() { + s.offset += s.cursor + s.buf = s.buf[s.cursor:] + s.length -= s.cursor + s.cursor = 0 +} + +func (s *Stream) readBuf() []byte { + if s.filledBuffer { + s.bufSize *= 2 + remainBuf := s.buf + s.buf = make([]byte, s.bufSize) + copy(s.buf, remainBuf) + } + remainLen := s.length - s.cursor + remainNotNulCharNum := int64(0) + for i := int64(0); i < 
remainLen; i++ { + if s.buf[s.cursor+i] == nul { + break + } + remainNotNulCharNum++ + } + s.length = s.cursor + remainNotNulCharNum + return s.buf[s.cursor+remainNotNulCharNum:] +} + +func (s *Stream) read() bool { + if s.allRead { + return false + } + buf := s.readBuf() + last := len(buf) - 1 + buf[last] = nul + n, err := s.r.Read(buf[:last]) + s.length += int64(n) + if n == last { + s.filledBuffer = true + } else { + s.filledBuffer = false + } + if err == io.EOF { + s.allRead = true + } else if err != nil { + return false + } + return true +} + +func (s *Stream) skipWhiteSpace() byte { + p := s.bufptr() +LOOP: + c := char(p, s.cursor) + switch c { + case ' ', '\n', '\t', '\r': + s.cursor++ + goto LOOP + case nul: + if s.read() { + p = s.bufptr() + goto LOOP + } + } + return c +} + +func (s *Stream) skipObject(depth int64) error { + braceCount := 1 + _, cursor, p := s.stat() + for { + switch char(p, cursor) { + case '{': + braceCount++ + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + case '}': + braceCount-- + depth-- + if braceCount == 0 { + s.cursor = cursor + 1 + return nil + } + case '[': + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + case ']': + depth-- + case '"': + for { + cursor++ + switch char(p, cursor) { + case '\\': + cursor++ + if char(p, cursor) == nul { + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + case '"': + goto SWITCH_OUT + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.statForRetry() + continue + } + return errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + } + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return errors.ErrUnexpectedEndOfJSON("object of object", cursor) + } + SWITCH_OUT: + cursor++ + } +} + +func (s *Stream) skipArray(depth int64) error { + bracketCount := 1 + _, cursor, p := s.stat() + for { + switch char(p, cursor) { + case '[': + bracketCount++ + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + case ']': + bracketCount-- + depth-- + if bracketCount == 0 { + s.cursor = cursor + 1 + return nil + } + case '{': + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + case '}': + depth-- + case '"': + for { + cursor++ + switch char(p, cursor) { + case '\\': + cursor++ + if char(p, cursor) == nul { + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + case '"': + goto SWITCH_OUT + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.statForRetry() + continue + } + return errors.ErrUnexpectedEndOfJSON("string of object", cursor) + } + } + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return errors.ErrUnexpectedEndOfJSON("array of object", cursor) + } + SWITCH_OUT: + cursor++ + } +} + +func (s *Stream) skipValue(depth int64) error { + _, cursor, p := s.stat() + for { + switch char(p, cursor) { + case ' ', '\n', '\t', '\r': + cursor++ + continue + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return errors.ErrUnexpectedEndOfJSON("value of object", s.totalOffset()) + case '{': + s.cursor = cursor + 1 + return s.skipObject(depth + 1) + case '[': + s.cursor = cursor + 
1 + return s.skipArray(depth + 1) + case '"': + for { + cursor++ + switch char(p, cursor) { + case '\\': + cursor++ + if char(p, cursor) == nul { + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset()) + } + case '"': + s.cursor = cursor + 1 + return nil + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.statForRetry() + continue + } + return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset()) + } + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + for { + cursor++ + c := char(p, cursor) + if floatTable[c] { + continue + } else if c == nul { + if s.read() { + _, cursor, p = s.stat() + continue + } + } + s.cursor = cursor + return nil + } + case 't': + s.cursor = cursor + if err := trueBytes(s); err != nil { + return err + } + return nil + case 'f': + s.cursor = cursor + if err := falseBytes(s); err != nil { + return err + } + return nil + case 'n': + s.cursor = cursor + if err := nullBytes(s); err != nil { + return err + } + return nil + } + cursor++ + } +} + +func nullBytes(s *Stream) error { + // current cursor's character is 'n' + s.cursor++ + if s.char() != 'u' { + if err := retryReadNull(s); err != nil { + return err + } + } + s.cursor++ + if s.char() != 'l' { + if err := retryReadNull(s); err != nil { + return err + } + } + s.cursor++ + if s.char() != 'l' { + if err := retryReadNull(s); err != nil { + return err + } + } + s.cursor++ + return nil +} + +func retryReadNull(s *Stream) error { + if s.char() == nul && s.read() { + return nil + } + return errors.ErrInvalidCharacter(s.char(), "null", s.totalOffset()) +} + +func trueBytes(s *Stream) error { + // current cursor's character is 't' + s.cursor++ + if s.char() != 'r' { + if err := retryReadTrue(s); err != nil { + return err + } + } + s.cursor++ + if s.char() != 'u' { + if err := retryReadTrue(s); err != nil { + return err + } + } + s.cursor++ + if s.char() != 'e' { + if err := retryReadTrue(s); err != nil { + return err + } + } + s.cursor++ + return nil +} + +func retryReadTrue(s *Stream) error { + if s.char() == nul && s.read() { + return nil + } + return errors.ErrInvalidCharacter(s.char(), "bool(true)", s.totalOffset()) +} + +func falseBytes(s *Stream) error { + // current cursor's character is 'f' + s.cursor++ + if s.char() != 'a' { + if err := retryReadFalse(s); err != nil { + return err + } + } + s.cursor++ + if s.char() != 'l' { + if err := retryReadFalse(s); err != nil { + return err + } + } + s.cursor++ + if s.char() != 's' { + if err := retryReadFalse(s); err != nil { + return err + } + } + s.cursor++ + if s.char() != 'e' { + if err := retryReadFalse(s); err != nil { + return err + } + } + s.cursor++ + return nil +} + +func retryReadFalse(s *Stream) error { + if s.char() == nul && s.read() { + return nil + } + return errors.ErrInvalidCharacter(s.char(), "bool(false)", s.totalOffset()) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/string.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/string.go new file mode 100644 index 00000000..d07ad710 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/string.go @@ -0,0 +1,441 @@ +package decoder + +import ( + "bytes" + "fmt" + "reflect" + "unicode" + "unicode/utf16" + "unicode/utf8" + "unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +type stringDecoder struct { + structName string + fieldName string +} + +func 
newStringDecoder(structName, fieldName string) *stringDecoder { + return &stringDecoder{ + structName: structName, + fieldName: fieldName, + } +} + +func (d *stringDecoder) errUnmarshalType(typeName string, offset int64) *errors.UnmarshalTypeError { + return &errors.UnmarshalTypeError{ + Value: typeName, + Type: reflect.TypeOf(""), + Offset: offset, + Struct: d.structName, + Field: d.fieldName, + } +} + +func (d *stringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + bytes, err := d.decodeStreamByte(s) + if err != nil { + return err + } + if bytes == nil { + return nil + } + **(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes)) + s.reset() + return nil +} + +func (d *stringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + bytes, c, err := d.decodeByte(ctx.Buf, cursor) + if err != nil { + return 0, err + } + if bytes == nil { + return c, nil + } + cursor = c + **(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes)) + return cursor, nil +} + +var ( + hexToInt = [256]int{ + '0': 0, + '1': 1, + '2': 2, + '3': 3, + '4': 4, + '5': 5, + '6': 6, + '7': 7, + '8': 8, + '9': 9, + 'A': 10, + 'B': 11, + 'C': 12, + 'D': 13, + 'E': 14, + 'F': 15, + 'a': 10, + 'b': 11, + 'c': 12, + 'd': 13, + 'e': 14, + 'f': 15, + } +) + +func unicodeToRune(code []byte) rune { + var r rune + for i := 0; i < len(code); i++ { + r = r*16 + rune(hexToInt[code[i]]) + } + return r +} + +func readAtLeast(s *Stream, n int64, p *unsafe.Pointer) bool { + for s.cursor+n >= s.length { + if !s.read() { + return false + } + *p = s.bufptr() + } + return true +} + +func decodeUnicodeRune(s *Stream, p unsafe.Pointer) (rune, int64, unsafe.Pointer, error) { + const defaultOffset = 5 + const surrogateOffset = 11 + + if !readAtLeast(s, defaultOffset, &p) { + return rune(0), 0, nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset()) + } + + r := unicodeToRune(s.buf[s.cursor+1 : s.cursor+defaultOffset]) + if utf16.IsSurrogate(r) { + if !readAtLeast(s, surrogateOffset, &p) { + return unicode.ReplacementChar, defaultOffset, p, nil + } + if s.buf[s.cursor+defaultOffset] != '\\' || s.buf[s.cursor+defaultOffset+1] != 'u' { + return unicode.ReplacementChar, defaultOffset, p, nil + } + r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset]) + if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar { + return r, surrogateOffset, p, nil + } + } + return r, defaultOffset, p, nil +} + +func decodeUnicode(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) { + const backSlashAndULen = 2 // length of \u + + r, offset, pp, err := decodeUnicodeRune(s, p) + if err != nil { + return nil, err + } + unicode := []byte(string(r)) + unicodeLen := int64(len(unicode)) + s.buf = append(append(s.buf[:s.cursor-1], unicode...), s.buf[s.cursor+offset:]...) 
+ unicodeOrgLen := offset - 1 + s.length = s.length - (backSlashAndULen + (unicodeOrgLen - unicodeLen)) + s.cursor = s.cursor - backSlashAndULen + unicodeLen + return pp, nil +} + +func decodeEscapeString(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) { + s.cursor++ +RETRY: + switch s.buf[s.cursor] { + case '"': + s.buf[s.cursor] = '"' + case '\\': + s.buf[s.cursor] = '\\' + case '/': + s.buf[s.cursor] = '/' + case 'b': + s.buf[s.cursor] = '\b' + case 'f': + s.buf[s.cursor] = '\f' + case 'n': + s.buf[s.cursor] = '\n' + case 'r': + s.buf[s.cursor] = '\r' + case 't': + s.buf[s.cursor] = '\t' + case 'u': + return decodeUnicode(s, p) + case nul: + if !s.read() { + return nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset()) + } + p = s.bufptr() + goto RETRY + default: + return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + } + s.buf = append(s.buf[:s.cursor-1], s.buf[s.cursor:]...) + s.length-- + s.cursor-- + p = s.bufptr() + return p, nil +} + +var ( + runeErrBytes = []byte(string(utf8.RuneError)) + runeErrBytesLen = int64(len(runeErrBytes)) +) + +func stringBytes(s *Stream) ([]byte, error) { + _, cursor, p := s.stat() + cursor++ // skip double quote char + start := cursor + for { + switch char(p, cursor) { + case '\\': + s.cursor = cursor + pp, err := decodeEscapeString(s, p) + if err != nil { + return nil, err + } + p = pp + cursor = s.cursor + case '"': + literal := s.buf[start:cursor] + cursor++ + s.cursor = cursor + return literal, nil + case + // 0x00 is nul, 0x5c is '\\', 0x22 is '"' . + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, // 0x00-0x0F + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, // 0x10-0x1F + 0x20, 0x21 /*0x22,*/, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, // 0x20-0x2F + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0x3E, 0x3F, // 0x30-0x3F + 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 0x4B, 0x4C, 0x4D, 0x4E, 0x4F, // 0x40-0x4F + 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x5B /*0x5C,*/, 0x5D, 0x5E, 0x5F, // 0x50-0x5F + 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, // 0x60-0x6F + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 0x7B, 0x7C, 0x7D, 0x7E, 0x7F: // 0x70-0x7F + // character is ASCII. skip to next char + case + 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x8B, 0x8C, 0x8D, 0x8E, 0x8F, // 0x80-0x8F + 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0x9B, 0x9C, 0x9D, 0x9E, 0x9F, // 0x90-0x9F + 0xA0, 0xA1, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xAB, 0xAC, 0xAD, 0xAE, 0xAF, // 0xA0-0xAF + 0xB0, 0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xBB, 0xBC, 0xBD, 0xBE, 0xBF, // 0xB0-0xBF + 0xC0, 0xC1, // 0xC0-0xC1 + 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF: // 0xF5-0xFE + // character is invalid + s.buf = append(append(append([]byte{}, s.buf[:cursor]...), runeErrBytes...), s.buf[cursor+1:]...) 
+ _, _, p = s.stat() + cursor += runeErrBytesLen + s.length += runeErrBytesLen + continue + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + goto ERROR + case 0xEF: + // RuneError is {0xEF, 0xBF, 0xBD} + if s.buf[cursor+1] == 0xBF && s.buf[cursor+2] == 0xBD { + // found RuneError: skip + cursor += 2 + break + } + fallthrough + default: + // multi bytes character + if !utf8.FullRune(s.buf[cursor : len(s.buf)-1]) { + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + goto ERROR + } + r, size := utf8.DecodeRune(s.buf[cursor:]) + if r == utf8.RuneError { + s.buf = append(append(append([]byte{}, s.buf[:cursor]...), runeErrBytes...), s.buf[cursor+1:]...) + cursor += runeErrBytesLen + s.length += runeErrBytesLen + _, _, p = s.stat() + } else { + cursor += int64(size) + } + continue + } + cursor++ + } +ERROR: + return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) +} + +func (d *stringDecoder) decodeStreamByte(s *Stream) ([]byte, error) { + for { + switch s.char() { + case ' ', '\n', '\t', '\r': + s.cursor++ + continue + case '[': + return nil, d.errUnmarshalType("array", s.totalOffset()) + case '{': + return nil, d.errUnmarshalType("object", s.totalOffset()) + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return nil, d.errUnmarshalType("number", s.totalOffset()) + case '"': + return stringBytes(s) + case 'n': + if err := nullBytes(s); err != nil { + return nil, err + } + return nil, nil + case nul: + if s.read() { + continue + } + } + break + } + return nil, errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset()) +} + +func (d *stringDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) { + for { + switch buf[cursor] { + case ' ', '\n', '\t', '\r': + cursor++ + case '[': + return nil, 0, d.errUnmarshalType("array", cursor) + case '{': + return nil, 0, d.errUnmarshalType("object", cursor) + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return nil, 0, d.errUnmarshalType("number", cursor) + case '"': + cursor++ + start := cursor + b := (*sliceHeader)(unsafe.Pointer(&buf)).data + escaped := 0 + for { + switch char(b, cursor) { + case '\\': + escaped++ + cursor++ + switch char(b, cursor) { + case '"', '\\', '/', 'b', 'f', 'n', 'r', 't': + cursor++ + case 'u': + buflen := int64(len(buf)) + if cursor+5 >= buflen { + return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor) + } + for i := int64(1); i <= 4; i++ { + c := char(b, cursor+i) + if !(('0' <= c && c <= '9') || ('a' <= c && c <= 'f') || ('A' <= c && c <= 'F')) { + return nil, 0, errors.ErrSyntax(fmt.Sprintf("json: invalid character %c in \\u hexadecimal character escape", c), cursor+i) + } + } + cursor += 5 + default: + return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor) + } + continue + case '"': + literal := buf[start:cursor] + if escaped > 0 { + literal = literal[:unescapeString(literal)] + } + cursor++ + return literal, cursor, nil + case nul: + return nil, 0, errors.ErrUnexpectedEndOfJSON("string", cursor) + } + cursor++ + } + case 'n': + if err := validateNull(buf, cursor); err != nil { + return nil, 0, err + } + cursor += 4 + return nil, cursor, nil + default: + return nil, 0, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor) + } + } +} + +var unescapeMap = [256]byte{ + '"': '"', + '\\': '\\', + '/': '/', + 'b': '\b', + 'f': '\f', + 'n': '\n', + 'r': '\r', + 't': '\t', +} + +func unsafeAdd(ptr unsafe.Pointer, offset int) unsafe.Pointer { + return 
unsafe.Pointer(uintptr(ptr) + uintptr(offset)) +} + +func unescapeString(buf []byte) int { + p := (*sliceHeader)(unsafe.Pointer(&buf)).data + end := unsafeAdd(p, len(buf)) + src := unsafeAdd(p, bytes.IndexByte(buf, '\\')) + dst := src + for src != end { + c := char(src, 0) + if c == '\\' { + escapeChar := char(src, 1) + if escapeChar != 'u' { + *(*byte)(dst) = unescapeMap[escapeChar] + src = unsafeAdd(src, 2) + dst = unsafeAdd(dst, 1) + } else { + v1 := hexToInt[char(src, 2)] + v2 := hexToInt[char(src, 3)] + v3 := hexToInt[char(src, 4)] + v4 := hexToInt[char(src, 5)] + code := rune((v1 << 12) | (v2 << 8) | (v3 << 4) | v4) + if code >= 0xd800 && code < 0xdc00 && uintptr(unsafeAdd(src, 11)) < uintptr(end) { + if char(src, 6) == '\\' && char(src, 7) == 'u' { + v1 := hexToInt[char(src, 8)] + v2 := hexToInt[char(src, 9)] + v3 := hexToInt[char(src, 10)] + v4 := hexToInt[char(src, 11)] + lo := rune((v1 << 12) | (v2 << 8) | (v3 << 4) | v4) + if lo >= 0xdc00 && lo < 0xe000 { + code = (code-0xd800)<<10 | (lo - 0xdc00) + 0x10000 + src = unsafeAdd(src, 6) + } + } + } + var b [utf8.UTFMax]byte + n := utf8.EncodeRune(b[:], code) + switch n { + case 4: + *(*byte)(unsafeAdd(dst, 3)) = b[3] + fallthrough + case 3: + *(*byte)(unsafeAdd(dst, 2)) = b[2] + fallthrough + case 2: + *(*byte)(unsafeAdd(dst, 1)) = b[1] + fallthrough + case 1: + *(*byte)(unsafeAdd(dst, 0)) = b[0] + } + src = unsafeAdd(src, 6) + dst = unsafeAdd(dst, n) + } + } else { + *(*byte)(dst) = c + src = unsafeAdd(src, 1) + dst = unsafeAdd(dst, 1) + } + } + return int(uintptr(dst) - uintptr(p)) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/struct.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/struct.go new file mode 100644 index 00000000..2c646804 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/struct.go @@ -0,0 +1,819 @@ +package decoder + +import ( + "fmt" + "math" + "math/bits" + "sort" + "strings" + "unicode" + "unicode/utf16" + "unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +type structFieldSet struct { + dec Decoder + offset uintptr + isTaggedKey bool + fieldIdx int + key string + keyLen int64 + err error +} + +type structDecoder struct { + fieldMap map[string]*structFieldSet + fieldUniqueNameNum int + stringDecoder *stringDecoder + structName string + fieldName string + isTriedOptimize bool + keyBitmapUint8 [][256]uint8 + keyBitmapUint16 [][256]uint16 + sortedFieldSets []*structFieldSet + keyDecoder func(*structDecoder, []byte, int64) (int64, *structFieldSet, error) + keyStreamDecoder func(*structDecoder, *Stream) (*structFieldSet, string, error) +} + +var ( + largeToSmallTable [256]byte +) + +func init() { + for i := 0; i < 256; i++ { + c := i + if 'A' <= c && c <= 'Z' { + c += 'a' - 'A' + } + largeToSmallTable[i] = byte(c) + } +} + +func newStructDecoder(structName, fieldName string, fieldMap map[string]*structFieldSet) *structDecoder { + return &structDecoder{ + fieldMap: fieldMap, + stringDecoder: newStringDecoder(structName, fieldName), + structName: structName, + fieldName: fieldName, + keyDecoder: decodeKey, + keyStreamDecoder: decodeKeyStream, + } +} + +const ( + allowOptimizeMaxKeyLen = 64 + allowOptimizeMaxFieldLen = 16 +) + +func (d *structDecoder) tryOptimize() { + fieldUniqueNameMap := map[string]int{} + fieldIdx := -1 + for k, v := range d.fieldMap { + lower := strings.ToLower(k) + idx, exists := fieldUniqueNameMap[lower] + if exists { + v.fieldIdx = idx + } else { + fieldIdx++ + v.fieldIdx = 
fieldIdx + } + fieldUniqueNameMap[lower] = fieldIdx + } + d.fieldUniqueNameNum = len(fieldUniqueNameMap) + + if d.isTriedOptimize { + return + } + fieldMap := map[string]*structFieldSet{} + conflicted := map[string]struct{}{} + for k, v := range d.fieldMap { + key := strings.ToLower(k) + if key != k { + // already exists same key (e.g. Hello and HELLO has same lower case key + if _, exists := conflicted[key]; exists { + d.isTriedOptimize = true + return + } + conflicted[key] = struct{}{} + } + if field, exists := fieldMap[key]; exists { + if field != v { + d.isTriedOptimize = true + return + } + } + fieldMap[key] = v + } + + if len(fieldMap) > allowOptimizeMaxFieldLen { + d.isTriedOptimize = true + return + } + + var maxKeyLen int + sortedKeys := []string{} + for key := range fieldMap { + keyLen := len(key) + if keyLen > allowOptimizeMaxKeyLen { + d.isTriedOptimize = true + return + } + if maxKeyLen < keyLen { + maxKeyLen = keyLen + } + sortedKeys = append(sortedKeys, key) + } + sort.Strings(sortedKeys) + + // By allocating one extra capacity than `maxKeyLen`, + // it is possible to avoid the process of comparing the index of the key with the length of the bitmap each time. + bitmapLen := maxKeyLen + 1 + if len(sortedKeys) <= 8 { + keyBitmap := make([][256]uint8, bitmapLen) + for i, key := range sortedKeys { + for j := 0; j < len(key); j++ { + c := key[j] + keyBitmap[j][c] |= (1 << uint(i)) + } + d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key]) + } + d.keyBitmapUint8 = keyBitmap + d.keyDecoder = decodeKeyByBitmapUint8 + d.keyStreamDecoder = decodeKeyByBitmapUint8Stream + } else { + keyBitmap := make([][256]uint16, bitmapLen) + for i, key := range sortedKeys { + for j := 0; j < len(key); j++ { + c := key[j] + keyBitmap[j][c] |= (1 << uint(i)) + } + d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key]) + } + d.keyBitmapUint16 = keyBitmap + d.keyDecoder = decodeKeyByBitmapUint16 + d.keyStreamDecoder = decodeKeyByBitmapUint16Stream + } +} + +// decode from '\uXXXX' +func decodeKeyCharByUnicodeRune(buf []byte, cursor int64) ([]byte, int64) { + const defaultOffset = 4 + const surrogateOffset = 6 + + r := unicodeToRune(buf[cursor : cursor+defaultOffset]) + if utf16.IsSurrogate(r) { + cursor += defaultOffset + if cursor+surrogateOffset >= int64(len(buf)) || buf[cursor] != '\\' || buf[cursor+1] != 'u' { + return []byte(string(unicode.ReplacementChar)), cursor + defaultOffset - 1 + } + cursor += 2 + r2 := unicodeToRune(buf[cursor : cursor+defaultOffset]) + if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar { + return []byte(string(r)), cursor + defaultOffset - 1 + } + } + return []byte(string(r)), cursor + defaultOffset - 1 +} + +func decodeKeyCharByEscapedChar(buf []byte, cursor int64) ([]byte, int64) { + c := buf[cursor] + cursor++ + switch c { + case '"': + return []byte{'"'}, cursor + case '\\': + return []byte{'\\'}, cursor + case '/': + return []byte{'/'}, cursor + case 'b': + return []byte{'\b'}, cursor + case 'f': + return []byte{'\f'}, cursor + case 'n': + return []byte{'\n'}, cursor + case 'r': + return []byte{'\r'}, cursor + case 't': + return []byte{'\t'}, cursor + case 'u': + return decodeKeyCharByUnicodeRune(buf, cursor) + } + return nil, cursor +} + +func decodeKeyByBitmapUint8(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) { + var ( + curBit uint8 = math.MaxUint8 + ) + b := (*sliceHeader)(unsafe.Pointer(&buf)).data + for { + switch char(b, cursor) { + case ' ', '\n', '\t', '\r': + cursor++ + case '"': + cursor++ + c := 
char(b, cursor) + switch c { + case '"': + cursor++ + return cursor, nil, nil + case nul: + return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor) + } + keyIdx := 0 + bitmap := d.keyBitmapUint8 + start := cursor + for { + c := char(b, cursor) + switch c { + case '"': + fieldSetIndex := bits.TrailingZeros8(curBit) + field := d.sortedFieldSets[fieldSetIndex] + keyLen := cursor - start + cursor++ + if keyLen < field.keyLen { + // early match + return cursor, nil, nil + } + return cursor, field, nil + case nul: + return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor) + case '\\': + cursor++ + chars, nextCursor := decodeKeyCharByEscapedChar(buf, cursor) + for _, c := range chars { + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + return decodeKeyNotFound(b, cursor) + } + keyIdx++ + } + cursor = nextCursor + default: + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + return decodeKeyNotFound(b, cursor) + } + keyIdx++ + } + cursor++ + } + default: + return cursor, nil, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor) + } + } +} + +func decodeKeyByBitmapUint16(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) { + var ( + curBit uint16 = math.MaxUint16 + ) + b := (*sliceHeader)(unsafe.Pointer(&buf)).data + for { + switch char(b, cursor) { + case ' ', '\n', '\t', '\r': + cursor++ + case '"': + cursor++ + c := char(b, cursor) + switch c { + case '"': + cursor++ + return cursor, nil, nil + case nul: + return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor) + } + keyIdx := 0 + bitmap := d.keyBitmapUint16 + start := cursor + for { + c := char(b, cursor) + switch c { + case '"': + fieldSetIndex := bits.TrailingZeros16(curBit) + field := d.sortedFieldSets[fieldSetIndex] + keyLen := cursor - start + cursor++ + if keyLen < field.keyLen { + // early match + return cursor, nil, nil + } + return cursor, field, nil + case nul: + return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor) + case '\\': + cursor++ + chars, nextCursor := decodeKeyCharByEscapedChar(buf, cursor) + for _, c := range chars { + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + return decodeKeyNotFound(b, cursor) + } + keyIdx++ + } + cursor = nextCursor + default: + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + return decodeKeyNotFound(b, cursor) + } + keyIdx++ + } + cursor++ + } + default: + return cursor, nil, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor) + } + } +} + +func decodeKeyNotFound(b unsafe.Pointer, cursor int64) (int64, *structFieldSet, error) { + for { + cursor++ + switch char(b, cursor) { + case '"': + cursor++ + return cursor, nil, nil + case '\\': + cursor++ + if char(b, cursor) == nul { + return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor) + } + case nul: + return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor) + } + } +} + +func decodeKey(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) { + key, c, err := d.stringDecoder.decodeByte(buf, cursor) + if err != nil { + return 0, nil, err + } + cursor = c + k := *(*string)(unsafe.Pointer(&key)) + field, exists := d.fieldMap[k] + if !exists { + return cursor, nil, nil + } + return cursor, field, nil +} + +func decodeKeyByBitmapUint8Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) { + var ( + curBit uint8 = math.MaxUint8 + ) + _, cursor, p := s.stat() + for { + switch char(p, cursor) { + case ' ', '\n', '\t', '\r': + cursor++ + case nul: + s.cursor = cursor + 
if s.read() { + _, cursor, p = s.stat() + continue + } + return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset()) + case '"': + cursor++ + FIRST_CHAR: + start := cursor + switch char(p, cursor) { + case '"': + cursor++ + s.cursor = cursor + return nil, "", nil + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + goto FIRST_CHAR + } + return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + } + keyIdx := 0 + bitmap := d.keyBitmapUint8 + for { + c := char(p, cursor) + switch c { + case '"': + fieldSetIndex := bits.TrailingZeros8(curBit) + field := d.sortedFieldSets[fieldSetIndex] + keyLen := cursor - start + cursor++ + s.cursor = cursor + if keyLen < field.keyLen { + // early match + return nil, field.key, nil + } + return field, field.key, nil + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + case '\\': + s.cursor = cursor + 1 // skip '\' char + chars, err := decodeKeyCharByEscapeCharStream(s) + if err != nil { + return nil, "", err + } + cursor = s.cursor + for _, c := range chars { + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + s.cursor = cursor + return decodeKeyNotFoundStream(s, start) + } + keyIdx++ + } + default: + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + s.cursor = cursor + return decodeKeyNotFoundStream(s, start) + } + keyIdx++ + } + cursor++ + } + default: + return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset()) + } + } +} + +func decodeKeyByBitmapUint16Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) { + var ( + curBit uint16 = math.MaxUint16 + ) + _, cursor, p := s.stat() + for { + switch char(p, cursor) { + case ' ', '\n', '\t', '\r': + cursor++ + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset()) + case '"': + cursor++ + FIRST_CHAR: + start := cursor + switch char(p, cursor) { + case '"': + cursor++ + s.cursor = cursor + return nil, "", nil + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + goto FIRST_CHAR + } + return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + } + keyIdx := 0 + bitmap := d.keyBitmapUint16 + for { + c := char(p, cursor) + switch c { + case '"': + fieldSetIndex := bits.TrailingZeros16(curBit) + field := d.sortedFieldSets[fieldSetIndex] + keyLen := cursor - start + cursor++ + s.cursor = cursor + if keyLen < field.keyLen { + // early match + return nil, field.key, nil + } + return field, field.key, nil + case nul: + s.cursor = cursor + if s.read() { + _, cursor, p = s.stat() + continue + } + return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + case '\\': + s.cursor = cursor + 1 // skip '\' char + chars, err := decodeKeyCharByEscapeCharStream(s) + if err != nil { + return nil, "", err + } + cursor = s.cursor + for _, c := range chars { + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + s.cursor = cursor + return decodeKeyNotFoundStream(s, start) + } + keyIdx++ + } + default: + curBit &= bitmap[keyIdx][largeToSmallTable[c]] + if curBit == 0 { + s.cursor = cursor + return decodeKeyNotFoundStream(s, start) + } + keyIdx++ + } + cursor++ + } + default: + return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset()) + } + } +} + +// decode from '\uXXXX' +func 
decodeKeyCharByUnicodeRuneStream(s *Stream) ([]byte, error) { + const defaultOffset = 4 + const surrogateOffset = 6 + + if s.cursor+defaultOffset >= s.length { + if !s.read() { + return nil, errors.ErrInvalidCharacter(s.char(), "escaped unicode char", s.totalOffset()) + } + } + + r := unicodeToRune(s.buf[s.cursor : s.cursor+defaultOffset]) + if utf16.IsSurrogate(r) { + s.cursor += defaultOffset + if s.cursor+surrogateOffset >= s.length { + s.read() + } + if s.cursor+surrogateOffset >= s.length || s.buf[s.cursor] != '\\' || s.buf[s.cursor+1] != 'u' { + s.cursor += defaultOffset - 1 + return []byte(string(unicode.ReplacementChar)), nil + } + r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset]) + if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar { + s.cursor += defaultOffset - 1 + return []byte(string(r)), nil + } + } + s.cursor += defaultOffset - 1 + return []byte(string(r)), nil +} + +func decodeKeyCharByEscapeCharStream(s *Stream) ([]byte, error) { + c := s.buf[s.cursor] + s.cursor++ +RETRY: + switch c { + case '"': + return []byte{'"'}, nil + case '\\': + return []byte{'\\'}, nil + case '/': + return []byte{'/'}, nil + case 'b': + return []byte{'\b'}, nil + case 'f': + return []byte{'\f'}, nil + case 'n': + return []byte{'\n'}, nil + case 'r': + return []byte{'\r'}, nil + case 't': + return []byte{'\t'}, nil + case 'u': + return decodeKeyCharByUnicodeRuneStream(s) + case nul: + if !s.read() { + return nil, errors.ErrInvalidCharacter(s.char(), "escaped char", s.totalOffset()) + } + goto RETRY + default: + return nil, errors.ErrUnexpectedEndOfJSON("struct field", s.totalOffset()) + } +} + +func decodeKeyNotFoundStream(s *Stream, start int64) (*structFieldSet, string, error) { + buf, cursor, p := s.stat() + for { + cursor++ + switch char(p, cursor) { + case '"': + b := buf[start:cursor] + key := *(*string)(unsafe.Pointer(&b)) + cursor++ + s.cursor = cursor + return nil, key, nil + case '\\': + cursor++ + if char(p, cursor) == nul { + s.cursor = cursor + if !s.read() { + return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + } + buf, cursor, p = s.statForRetry() + } + case nul: + s.cursor = cursor + if !s.read() { + return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset()) + } + buf, cursor, p = s.statForRetry() + } + } +} + +func decodeKeyStream(d *structDecoder, s *Stream) (*structFieldSet, string, error) { + key, err := d.stringDecoder.decodeStreamByte(s) + if err != nil { + return nil, "", err + } + k := *(*string)(unsafe.Pointer(&key)) + return d.fieldMap[k], k, nil +} + +func (d *structDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + depth++ + if depth > maxDecodeNestingDepth { + return errors.ErrExceededMaxDepth(s.char(), s.cursor) + } + + c := s.skipWhiteSpace() + switch c { + case 'n': + if err := nullBytes(s); err != nil { + return err + } + return nil + default: + if s.char() != '{' { + return errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset()) + } + } + s.cursor++ + if s.skipWhiteSpace() == '}' { + s.cursor++ + return nil + } + var ( + seenFields map[int]struct{} + seenFieldNum int + ) + firstWin := (s.Option.Flags & FirstWinOption) != 0 + if firstWin { + seenFields = make(map[int]struct{}, d.fieldUniqueNameNum) + } + for { + s.reset() + field, key, err := d.keyStreamDecoder(d, s) + if err != nil { + return err + } + if s.skipWhiteSpace() != ':' { + return errors.ErrExpected("colon after object key", s.totalOffset()) + } + s.cursor++ + if field != nil { + if field.err != nil 
{ + return field.err + } + if firstWin { + if _, exists := seenFields[field.fieldIdx]; exists { + if err := s.skipValue(depth); err != nil { + return err + } + } else { + if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil { + return err + } + seenFieldNum++ + if d.fieldUniqueNameNum <= seenFieldNum { + return s.skipObject(depth) + } + seenFields[field.fieldIdx] = struct{}{} + } + } else { + if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil { + return err + } + } + } else if s.DisallowUnknownFields { + return fmt.Errorf("json: unknown field %q", key) + } else { + if err := s.skipValue(depth); err != nil { + return err + } + } + c := s.skipWhiteSpace() + if c == '}' { + s.cursor++ + return nil + } + if c != ',' { + return errors.ErrExpected("comma after object element", s.totalOffset()) + } + s.cursor++ + } +} + +func (d *structDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + depth++ + if depth > maxDecodeNestingDepth { + return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor) + } + buflen := int64(len(buf)) + cursor = skipWhiteSpace(buf, cursor) + b := (*sliceHeader)(unsafe.Pointer(&buf)).data + switch char(b, cursor) { + case 'n': + if err := validateNull(buf, cursor); err != nil { + return 0, err + } + cursor += 4 + return cursor, nil + case '{': + default: + return 0, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor) + } + cursor++ + cursor = skipWhiteSpace(buf, cursor) + if buf[cursor] == '}' { + cursor++ + return cursor, nil + } + var ( + seenFields map[int]struct{} + seenFieldNum int + ) + firstWin := (ctx.Option.Flags & FirstWinOption) != 0 + if firstWin { + seenFields = make(map[int]struct{}, d.fieldUniqueNameNum) + } + for { + c, field, err := d.keyDecoder(d, buf, cursor) + if err != nil { + return 0, err + } + cursor = skipWhiteSpace(buf, c) + if char(b, cursor) != ':' { + return 0, errors.ErrExpected("colon after object key", cursor) + } + cursor++ + if cursor >= buflen { + return 0, errors.ErrExpected("object value after colon", cursor) + } + if field != nil { + if field.err != nil { + return 0, field.err + } + if firstWin { + if _, exists := seenFields[field.fieldIdx]; exists { + c, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + cursor = c + } else { + c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset)) + if err != nil { + return 0, err + } + cursor = c + seenFieldNum++ + if d.fieldUniqueNameNum <= seenFieldNum { + return skipObject(buf, cursor, depth) + } + seenFields[field.fieldIdx] = struct{}{} + } + } else { + c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset)) + if err != nil { + return 0, err + } + cursor = c + } + } else { + c, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + cursor = c + } + cursor = skipWhiteSpace(buf, cursor) + if char(b, cursor) == '}' { + cursor++ + return cursor, nil + } + if char(b, cursor) != ',' { + return 0, errors.ErrExpected("comma after object element", cursor) + } + cursor++ + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/type.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/type.go new file mode 100644 index 00000000..70e9907c --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/type.go @@ -0,0 +1,29 @@ +package decoder + +import ( + "context" + 
"encoding" + "encoding/json" + "reflect" + "unsafe" +) + +type Decoder interface { + Decode(*RuntimeContext, int64, int64, unsafe.Pointer) (int64, error) + DecodeStream(*Stream, int64, unsafe.Pointer) error +} + +const ( + nul = '\000' + maxDecodeNestingDepth = 10000 +) + +type unmarshalerContext interface { + UnmarshalJSON(context.Context, []byte) error +} + +var ( + unmarshalJSONType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() + unmarshalJSONContextType = reflect.TypeOf((*unmarshalerContext)(nil)).Elem() + unmarshalTextType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() +) diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/uint.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/uint.go new file mode 100644 index 00000000..a62c5149 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/uint.go @@ -0,0 +1,190 @@ +package decoder + +import ( + "fmt" + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type uintDecoder struct { + typ *runtime.Type + kind reflect.Kind + op func(unsafe.Pointer, uint64) + structName string + fieldName string +} + +func newUintDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, uint64)) *uintDecoder { + return &uintDecoder{ + typ: typ, + kind: typ.Kind(), + op: op, + structName: structName, + fieldName: fieldName, + } +} + +func (d *uintDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError { + return &errors.UnmarshalTypeError{ + Value: fmt.Sprintf("number %s", string(buf)), + Type: runtime.RType2Type(d.typ), + Offset: offset, + } +} + +var ( + pow10u64 = [...]uint64{ + 1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09, + 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19, + } + pow10u64Len = len(pow10u64) +) + +func (d *uintDecoder) parseUint(b []byte) (uint64, error) { + maxDigit := len(b) + if maxDigit > pow10u64Len { + return 0, fmt.Errorf("invalid length of number") + } + sum := uint64(0) + for i := 0; i < maxDigit; i++ { + c := uint64(b[i]) - 48 + digitValue := pow10u64[maxDigit-i-1] + sum += c * digitValue + } + return sum, nil +} + +func (d *uintDecoder) decodeStreamByte(s *Stream) ([]byte, error) { + for { + switch s.char() { + case ' ', '\n', '\t', '\r': + s.cursor++ + continue + case '0': + s.cursor++ + return numZeroBuf, nil + case '1', '2', '3', '4', '5', '6', '7', '8', '9': + start := s.cursor + for { + s.cursor++ + if numTable[s.char()] { + continue + } else if s.char() == nul { + if s.read() { + s.cursor-- // for retry current character + continue + } + } + break + } + num := s.buf[start:s.cursor] + return num, nil + case 'n': + if err := nullBytes(s); err != nil { + return nil, err + } + return nil, nil + case nul: + if s.read() { + continue + } + default: + return nil, d.typeError([]byte{s.char()}, s.totalOffset()) + } + break + } + return nil, errors.ErrUnexpectedEndOfJSON("number(unsigned integer)", s.totalOffset()) +} + +func (d *uintDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) { + for { + switch buf[cursor] { + case ' ', '\n', '\t', '\r': + cursor++ + continue + case '0': + cursor++ + return numZeroBuf, cursor, nil + case '1', '2', '3', '4', '5', '6', '7', '8', '9': + start := cursor + cursor++ + for numTable[buf[cursor]] { + cursor++ + } + num := buf[start:cursor] + return num, cursor, nil + case 'n': + if err := validateNull(buf, cursor); err != nil { + return 
nil, 0, err + } + cursor += 4 + return nil, cursor, nil + default: + return nil, 0, d.typeError([]byte{buf[cursor]}, cursor) + } + } +} + +func (d *uintDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + bytes, err := d.decodeStreamByte(s) + if err != nil { + return err + } + if bytes == nil { + return nil + } + u64, err := d.parseUint(bytes) + if err != nil { + return d.typeError(bytes, s.totalOffset()) + } + switch d.kind { + case reflect.Uint8: + if (1 << 8) <= u64 { + return d.typeError(bytes, s.totalOffset()) + } + case reflect.Uint16: + if (1 << 16) <= u64 { + return d.typeError(bytes, s.totalOffset()) + } + case reflect.Uint32: + if (1 << 32) <= u64 { + return d.typeError(bytes, s.totalOffset()) + } + } + d.op(p, u64) + return nil +} + +func (d *uintDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + bytes, c, err := d.decodeByte(ctx.Buf, cursor) + if err != nil { + return 0, err + } + if bytes == nil { + return c, nil + } + cursor = c + u64, err := d.parseUint(bytes) + if err != nil { + return 0, d.typeError(bytes, cursor) + } + switch d.kind { + case reflect.Uint8: + if (1 << 8) <= u64 { + return 0, d.typeError(bytes, cursor) + } + case reflect.Uint16: + if (1 << 16) <= u64 { + return 0, d.typeError(bytes, cursor) + } + case reflect.Uint32: + if (1 << 32) <= u64 { + return 0, d.typeError(bytes, cursor) + } + } + d.op(p, u64) + return cursor, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_json.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_json.go new file mode 100644 index 00000000..e9b25c68 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_json.go @@ -0,0 +1,99 @@ +package decoder + +import ( + "context" + "encoding/json" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type unmarshalJSONDecoder struct { + typ *runtime.Type + structName string + fieldName string +} + +func newUnmarshalJSONDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalJSONDecoder { + return &unmarshalJSONDecoder{ + typ: typ, + structName: structName, + fieldName: fieldName, + } +} + +func (d *unmarshalJSONDecoder) annotateError(cursor int64, err error) { + switch e := err.(type) { + case *errors.UnmarshalTypeError: + e.Struct = d.structName + e.Field = d.fieldName + case *errors.SyntaxError: + e.Offset = cursor + } +} + +func (d *unmarshalJSONDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + s.skipWhiteSpace() + start := s.cursor + if err := s.skipValue(depth); err != nil { + return err + } + src := s.buf[start:s.cursor] + dst := make([]byte, len(src)) + copy(dst, src) + + v := *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: d.typ, + ptr: p, + })) + switch v := v.(type) { + case unmarshalerContext: + var ctx context.Context + if (s.Option.Flags & ContextOption) != 0 { + ctx = s.Option.Context + } else { + ctx = context.Background() + } + if err := v.UnmarshalJSON(ctx, dst); err != nil { + d.annotateError(s.cursor, err) + return err + } + case json.Unmarshaler: + if err := v.UnmarshalJSON(dst); err != nil { + d.annotateError(s.cursor, err) + return err + } + } + return nil +} + +func (d *unmarshalJSONDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + cursor = skipWhiteSpace(buf, cursor) + start := cursor + end, err := 
skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + src := buf[start:end] + dst := make([]byte, len(src)) + copy(dst, src) + + v := *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: d.typ, + ptr: p, + })) + if (ctx.Option.Flags & ContextOption) != 0 { + if err := v.(unmarshalerContext).UnmarshalJSON(ctx.Option.Context, dst); err != nil { + d.annotateError(cursor, err) + return 0, err + } + } else { + if err := v.(json.Unmarshaler).UnmarshalJSON(dst); err != nil { + d.annotateError(cursor, err) + return 0, err + } + } + return end, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_text.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_text.go new file mode 100644 index 00000000..1ef28778 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/unmarshal_text.go @@ -0,0 +1,280 @@ +package decoder + +import ( + "bytes" + "encoding" + "unicode" + "unicode/utf16" + "unicode/utf8" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type unmarshalTextDecoder struct { + typ *runtime.Type + structName string + fieldName string +} + +func newUnmarshalTextDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalTextDecoder { + return &unmarshalTextDecoder{ + typ: typ, + structName: structName, + fieldName: fieldName, + } +} + +func (d *unmarshalTextDecoder) annotateError(cursor int64, err error) { + switch e := err.(type) { + case *errors.UnmarshalTypeError: + e.Struct = d.structName + e.Field = d.fieldName + case *errors.SyntaxError: + e.Offset = cursor + } +} + +var ( + nullbytes = []byte(`null`) +) + +func (d *unmarshalTextDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + s.skipWhiteSpace() + start := s.cursor + if err := s.skipValue(depth); err != nil { + return err + } + src := s.buf[start:s.cursor] + if len(src) > 0 { + switch src[0] { + case '[': + return &errors.UnmarshalTypeError{ + Value: "array", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + case '{': + return &errors.UnmarshalTypeError{ + Value: "object", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return &errors.UnmarshalTypeError{ + Value: "number", + Type: runtime.RType2Type(d.typ), + Offset: s.totalOffset(), + } + case 'n': + if bytes.Equal(src, nullbytes) { + *(*unsafe.Pointer)(p) = nil + return nil + } + } + } + dst := make([]byte, len(src)) + copy(dst, src) + + if b, ok := unquoteBytes(dst); ok { + dst = b + } + v := *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: d.typ, + ptr: p, + })) + if err := v.(encoding.TextUnmarshaler).UnmarshalText(dst); err != nil { + d.annotateError(s.cursor, err) + return err + } + return nil +} + +func (d *unmarshalTextDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + buf := ctx.Buf + cursor = skipWhiteSpace(buf, cursor) + start := cursor + end, err := skipValue(buf, cursor, depth) + if err != nil { + return 0, err + } + src := buf[start:end] + if len(src) > 0 { + switch src[0] { + case '[': + return 0, &errors.UnmarshalTypeError{ + Value: "array", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + case '{': + return 0, &errors.UnmarshalTypeError{ + Value: "object", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return 0, 
&errors.UnmarshalTypeError{ + Value: "number", + Type: runtime.RType2Type(d.typ), + Offset: start, + } + case 'n': + if bytes.Equal(src, nullbytes) { + *(*unsafe.Pointer)(p) = nil + return end, nil + } + } + } + + if s, ok := unquoteBytes(src); ok { + src = s + } + v := *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: d.typ, + ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)), + })) + if err := v.(encoding.TextUnmarshaler).UnmarshalText(src); err != nil { + d.annotateError(cursor, err) + return 0, err + } + return end, nil +} + +func unquoteBytes(s []byte) (t []byte, ok bool) { + length := len(s) + if length < 2 || s[0] != '"' || s[length-1] != '"' { + return + } + s = s[1 : length-1] + length -= 2 + + // Check for unusual characters. If there are none, + // then no unquoting is needed, so return a slice of the + // original bytes. + r := 0 + for r < length { + c := s[r] + if c == '\\' || c == '"' || c < ' ' { + break + } + if c < utf8.RuneSelf { + r++ + continue + } + rr, size := utf8.DecodeRune(s[r:]) + if rr == utf8.RuneError && size == 1 { + break + } + r += size + } + if r == length { + return s, true + } + + b := make([]byte, length+2*utf8.UTFMax) + w := copy(b, s[0:r]) + for r < length { + // Out of room? Can only happen if s is full of + // malformed UTF-8 and we're replacing each + // byte with RuneError. + if w >= len(b)-2*utf8.UTFMax { + nb := make([]byte, (len(b)+utf8.UTFMax)*2) + copy(nb, b[0:w]) + b = nb + } + switch c := s[r]; { + case c == '\\': + r++ + if r >= length { + return + } + switch s[r] { + default: + return + case '"', '\\', '/', '\'': + b[w] = s[r] + r++ + w++ + case 'b': + b[w] = '\b' + r++ + w++ + case 'f': + b[w] = '\f' + r++ + w++ + case 'n': + b[w] = '\n' + r++ + w++ + case 'r': + b[w] = '\r' + r++ + w++ + case 't': + b[w] = '\t' + r++ + w++ + case 'u': + r-- + rr := getu4(s[r:]) + if rr < 0 { + return + } + r += 6 + if utf16.IsSurrogate(rr) { + rr1 := getu4(s[r:]) + if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { + // A valid pair; consume. + r += 6 + w += utf8.EncodeRune(b[w:], dec) + break + } + // Invalid surrogate; fall back to replacement rune. + rr = unicode.ReplacementChar + } + w += utf8.EncodeRune(b[w:], rr) + } + + // Quote, control characters are invalid. + case c == '"', c < ' ': + return + + // ASCII + case c < utf8.RuneSelf: + b[w] = c + r++ + w++ + + // Coerce to well-formed UTF-8. 
+ default: + rr, size := utf8.DecodeRune(s[r:]) + r += size + w += utf8.EncodeRune(b[w:], rr) + } + } + return b[0:w], true +} + +func getu4(s []byte) rune { + if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { + return -1 + } + var r rune + for _, c := range s[2:6] { + switch { + case '0' <= c && c <= '9': + c = c - '0' + case 'a' <= c && c <= 'f': + c = c - 'a' + 10 + case 'A' <= c && c <= 'F': + c = c - 'A' + 10 + default: + return -1 + } + r = r*16 + rune(c) + } + return r +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/decoder/wrapped_string.go b/index/server/vendor/github.com/goccy/go-json/internal/decoder/wrapped_string.go new file mode 100644 index 00000000..66227ae0 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/decoder/wrapped_string.go @@ -0,0 +1,68 @@ +package decoder + +import ( + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +type wrappedStringDecoder struct { + typ *runtime.Type + dec Decoder + stringDecoder *stringDecoder + structName string + fieldName string + isPtrType bool +} + +func newWrappedStringDecoder(typ *runtime.Type, dec Decoder, structName, fieldName string) *wrappedStringDecoder { + return &wrappedStringDecoder{ + typ: typ, + dec: dec, + stringDecoder: newStringDecoder(structName, fieldName), + structName: structName, + fieldName: fieldName, + isPtrType: typ.Kind() == reflect.Ptr, + } +} + +func (d *wrappedStringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error { + bytes, err := d.stringDecoder.decodeStreamByte(s) + if err != nil { + return err + } + if bytes == nil { + if d.isPtrType { + *(*unsafe.Pointer)(p) = nil + } + return nil + } + b := make([]byte, len(bytes)+1) + copy(b, bytes) + if _, err := d.dec.Decode(&RuntimeContext{Buf: b}, 0, depth, p); err != nil { + return err + } + return nil +} + +func (d *wrappedStringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) { + bytes, c, err := d.stringDecoder.decodeByte(ctx.Buf, cursor) + if err != nil { + return 0, err + } + if bytes == nil { + if d.isPtrType { + *(*unsafe.Pointer)(p) = nil + } + return c, nil + } + bytes = append(bytes, nul) + oldBuf := ctx.Buf + ctx.Buf = bytes + if _, err := d.dec.Decode(ctx, 0, depth, p); err != nil { + return 0, err + } + ctx.Buf = oldBuf + return c, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/code.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/code.go new file mode 100644 index 00000000..8d62a9cd --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/code.go @@ -0,0 +1,1017 @@ +package encoder + +import ( + "fmt" + "reflect" + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +type Code interface { + Kind() CodeKind + ToOpcode(*compileContext) Opcodes + Filter(*FieldQuery) Code +} + +type AnonymousCode interface { + ToAnonymousOpcode(*compileContext) Opcodes +} + +type Opcodes []*Opcode + +func (o Opcodes) First() *Opcode { + if len(o) == 0 { + return nil + } + return o[0] +} + +func (o Opcodes) Last() *Opcode { + if len(o) == 0 { + return nil + } + return o[len(o)-1] +} + +func (o Opcodes) Add(codes ...*Opcode) Opcodes { + return append(o, codes...) 
+} + +type CodeKind int + +const ( + CodeKindInterface CodeKind = iota + CodeKindPtr + CodeKindInt + CodeKindUint + CodeKindFloat + CodeKindString + CodeKindBool + CodeKindStruct + CodeKindMap + CodeKindSlice + CodeKindArray + CodeKindBytes + CodeKindMarshalJSON + CodeKindMarshalText + CodeKindRecursive +) + +type IntCode struct { + typ *runtime.Type + bitSize uint8 + isString bool + isPtr bool +} + +func (c *IntCode) Kind() CodeKind { + return CodeKindInt +} + +func (c *IntCode) ToOpcode(ctx *compileContext) Opcodes { + var code *Opcode + switch { + case c.isPtr: + code = newOpCode(ctx, c.typ, OpIntPtr) + case c.isString: + code = newOpCode(ctx, c.typ, OpIntString) + default: + code = newOpCode(ctx, c.typ, OpInt) + } + code.NumBitSize = c.bitSize + ctx.incIndex() + return Opcodes{code} +} + +func (c *IntCode) Filter(_ *FieldQuery) Code { + return c +} + +type UintCode struct { + typ *runtime.Type + bitSize uint8 + isString bool + isPtr bool +} + +func (c *UintCode) Kind() CodeKind { + return CodeKindUint +} + +func (c *UintCode) ToOpcode(ctx *compileContext) Opcodes { + var code *Opcode + switch { + case c.isPtr: + code = newOpCode(ctx, c.typ, OpUintPtr) + case c.isString: + code = newOpCode(ctx, c.typ, OpUintString) + default: + code = newOpCode(ctx, c.typ, OpUint) + } + code.NumBitSize = c.bitSize + ctx.incIndex() + return Opcodes{code} +} + +func (c *UintCode) Filter(_ *FieldQuery) Code { + return c +} + +type FloatCode struct { + typ *runtime.Type + bitSize uint8 + isPtr bool +} + +func (c *FloatCode) Kind() CodeKind { + return CodeKindFloat +} + +func (c *FloatCode) ToOpcode(ctx *compileContext) Opcodes { + var code *Opcode + switch { + case c.isPtr: + switch c.bitSize { + case 32: + code = newOpCode(ctx, c.typ, OpFloat32Ptr) + default: + code = newOpCode(ctx, c.typ, OpFloat64Ptr) + } + default: + switch c.bitSize { + case 32: + code = newOpCode(ctx, c.typ, OpFloat32) + default: + code = newOpCode(ctx, c.typ, OpFloat64) + } + } + ctx.incIndex() + return Opcodes{code} +} + +func (c *FloatCode) Filter(_ *FieldQuery) Code { + return c +} + +type StringCode struct { + typ *runtime.Type + isPtr bool +} + +func (c *StringCode) Kind() CodeKind { + return CodeKindString +} + +func (c *StringCode) ToOpcode(ctx *compileContext) Opcodes { + isJSONNumberType := c.typ == runtime.Type2RType(jsonNumberType) + var code *Opcode + if c.isPtr { + if isJSONNumberType { + code = newOpCode(ctx, c.typ, OpNumberPtr) + } else { + code = newOpCode(ctx, c.typ, OpStringPtr) + } + } else { + if isJSONNumberType { + code = newOpCode(ctx, c.typ, OpNumber) + } else { + code = newOpCode(ctx, c.typ, OpString) + } + } + ctx.incIndex() + return Opcodes{code} +} + +func (c *StringCode) Filter(_ *FieldQuery) Code { + return c +} + +type BoolCode struct { + typ *runtime.Type + isPtr bool +} + +func (c *BoolCode) Kind() CodeKind { + return CodeKindBool +} + +func (c *BoolCode) ToOpcode(ctx *compileContext) Opcodes { + var code *Opcode + switch { + case c.isPtr: + code = newOpCode(ctx, c.typ, OpBoolPtr) + default: + code = newOpCode(ctx, c.typ, OpBool) + } + ctx.incIndex() + return Opcodes{code} +} + +func (c *BoolCode) Filter(_ *FieldQuery) Code { + return c +} + +type BytesCode struct { + typ *runtime.Type + isPtr bool +} + +func (c *BytesCode) Kind() CodeKind { + return CodeKindBytes +} + +func (c *BytesCode) ToOpcode(ctx *compileContext) Opcodes { + var code *Opcode + switch { + case c.isPtr: + code = newOpCode(ctx, c.typ, OpBytesPtr) + default: + code = newOpCode(ctx, c.typ, OpBytes) + } + ctx.incIndex() + return 
Opcodes{code} +} + +func (c *BytesCode) Filter(_ *FieldQuery) Code { + return c +} + +type SliceCode struct { + typ *runtime.Type + value Code +} + +func (c *SliceCode) Kind() CodeKind { + return CodeKindSlice +} + +func (c *SliceCode) ToOpcode(ctx *compileContext) Opcodes { + // header => opcode => elem => end + // ^ | + // |________| + size := c.typ.Elem().Size() + header := newSliceHeaderCode(ctx, c.typ) + ctx.incIndex() + + ctx.incIndent() + codes := c.value.ToOpcode(ctx) + ctx.decIndent() + + codes.First().Flags |= IndirectFlags + elemCode := newSliceElemCode(ctx, c.typ.Elem(), header, size) + ctx.incIndex() + end := newOpCode(ctx, c.typ, OpSliceEnd) + ctx.incIndex() + header.End = end + header.Next = codes.First() + codes.Last().Next = elemCode + elemCode.Next = codes.First() + elemCode.End = end + return Opcodes{header}.Add(codes...).Add(elemCode).Add(end) +} + +func (c *SliceCode) Filter(_ *FieldQuery) Code { + return c +} + +type ArrayCode struct { + typ *runtime.Type + value Code +} + +func (c *ArrayCode) Kind() CodeKind { + return CodeKindArray +} + +func (c *ArrayCode) ToOpcode(ctx *compileContext) Opcodes { + // header => opcode => elem => end + // ^ | + // |________| + elem := c.typ.Elem() + alen := c.typ.Len() + size := elem.Size() + + header := newArrayHeaderCode(ctx, c.typ, alen) + ctx.incIndex() + + ctx.incIndent() + codes := c.value.ToOpcode(ctx) + ctx.decIndent() + + codes.First().Flags |= IndirectFlags + + elemCode := newArrayElemCode(ctx, elem, header, alen, size) + ctx.incIndex() + + end := newOpCode(ctx, c.typ, OpArrayEnd) + ctx.incIndex() + + header.End = end + header.Next = codes.First() + codes.Last().Next = elemCode + elemCode.Next = codes.First() + elemCode.End = end + + return Opcodes{header}.Add(codes...).Add(elemCode).Add(end) +} + +func (c *ArrayCode) Filter(_ *FieldQuery) Code { + return c +} + +type MapCode struct { + typ *runtime.Type + key Code + value Code +} + +func (c *MapCode) Kind() CodeKind { + return CodeKindMap +} + +func (c *MapCode) ToOpcode(ctx *compileContext) Opcodes { + // header => code => value => code => key => code => value => code => end + // ^ | + // |_______________________| + header := newMapHeaderCode(ctx, c.typ) + ctx.incIndex() + + keyCodes := c.key.ToOpcode(ctx) + + value := newMapValueCode(ctx, c.typ.Elem(), header) + ctx.incIndex() + + ctx.incIndent() + valueCodes := c.value.ToOpcode(ctx) + ctx.decIndent() + + valueCodes.First().Flags |= IndirectFlags + + key := newMapKeyCode(ctx, c.typ.Key(), header) + ctx.incIndex() + + end := newMapEndCode(ctx, c.typ, header) + ctx.incIndex() + + header.Next = keyCodes.First() + keyCodes.Last().Next = value + value.Next = valueCodes.First() + valueCodes.Last().Next = key + key.Next = keyCodes.First() + + header.End = end + key.End = end + value.End = end + return Opcodes{header}.Add(keyCodes...).Add(value).Add(valueCodes...).Add(key).Add(end) +} + +func (c *MapCode) Filter(_ *FieldQuery) Code { + return c +} + +type StructCode struct { + typ *runtime.Type + fields []*StructFieldCode + isPtr bool + disableIndirectConversion bool + isIndirect bool + isRecursive bool +} + +func (c *StructCode) Kind() CodeKind { + return CodeKindStruct +} + +func (c *StructCode) lastFieldCode(field *StructFieldCode, firstField *Opcode) *Opcode { + if isEmbeddedStruct(field) { + return c.lastAnonymousFieldCode(firstField) + } + lastField := firstField + for lastField.NextField != nil { + lastField = lastField.NextField + } + return lastField +} + +func (c *StructCode) lastAnonymousFieldCode(firstField *Opcode) 
*Opcode { + // firstField is special StructHead operation for anonymous structure. + // So, StructHead's next operation is truly struct head operation. + lastField := firstField.Next + for lastField.NextField != nil { + lastField = lastField.NextField + } + return lastField +} + +func (c *StructCode) ToOpcode(ctx *compileContext) Opcodes { + // header => code => structField => code => end + // ^ | + // |__________| + if c.isRecursive { + recursive := newRecursiveCode(ctx, c.typ, &CompiledCode{}) + recursive.Type = c.typ + ctx.incIndex() + *ctx.recursiveCodes = append(*ctx.recursiveCodes, recursive) + return Opcodes{recursive} + } + codes := Opcodes{} + var prevField *Opcode + ctx.incIndent() + for idx, field := range c.fields { + isFirstField := idx == 0 + isEndField := idx == len(c.fields)-1 + fieldCodes := field.ToOpcode(ctx, isFirstField, isEndField) + for _, code := range fieldCodes { + if c.isIndirect { + code.Flags |= IndirectFlags + } + } + firstField := fieldCodes.First() + if len(codes) > 0 { + codes.Last().Next = firstField + firstField.Idx = codes.First().Idx + } + if prevField != nil { + prevField.NextField = firstField + } + if isEndField { + endField := fieldCodes.Last() + if isEmbeddedStruct(field) { + firstField.End = endField + lastField := c.lastAnonymousFieldCode(firstField) + lastField.NextField = endField + } + if len(codes) > 0 { + codes.First().End = endField + } else { + firstField.End = endField + } + codes = codes.Add(fieldCodes...) + break + } + prevField = c.lastFieldCode(field, firstField) + codes = codes.Add(fieldCodes...) + } + if len(codes) == 0 { + head := &Opcode{ + Op: OpStructHead, + Idx: opcodeOffset(ctx.ptrIndex), + Type: c.typ, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + } + ctx.incOpcodeIndex() + end := &Opcode{ + Op: OpStructEnd, + Idx: opcodeOffset(ctx.ptrIndex), + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + } + head.NextField = end + head.Next = end + head.End = end + codes = codes.Add(head, end) + ctx.incIndex() + } + ctx.decIndent() + ctx.structTypeToCodes[uintptr(unsafe.Pointer(c.typ))] = codes + return codes +} + +func (c *StructCode) ToAnonymousOpcode(ctx *compileContext) Opcodes { + // header => code => structField => code => end + // ^ | + // |__________| + if c.isRecursive { + recursive := newRecursiveCode(ctx, c.typ, &CompiledCode{}) + recursive.Type = c.typ + ctx.incIndex() + *ctx.recursiveCodes = append(*ctx.recursiveCodes, recursive) + return Opcodes{recursive} + } + codes := Opcodes{} + var prevField *Opcode + for idx, field := range c.fields { + isFirstField := idx == 0 + isEndField := idx == len(c.fields)-1 + fieldCodes := field.ToAnonymousOpcode(ctx, isFirstField, isEndField) + for _, code := range fieldCodes { + if c.isIndirect { + code.Flags |= IndirectFlags + } + } + firstField := fieldCodes.First() + if len(codes) > 0 { + codes.Last().Next = firstField + firstField.Idx = codes.First().Idx + } + if prevField != nil { + prevField.NextField = firstField + } + if isEndField { + lastField := fieldCodes.Last() + if len(codes) > 0 { + codes.First().End = lastField + } else { + firstField.End = lastField + } + } + prevField = firstField + codes = codes.Add(fieldCodes...) 
+ } + return codes +} + +func (c *StructCode) removeFieldsByTags(tags runtime.StructTags) { + fields := make([]*StructFieldCode, 0, len(c.fields)) + for _, field := range c.fields { + if field.isAnonymous { + structCode := field.getAnonymousStruct() + if structCode != nil && !structCode.isRecursive { + structCode.removeFieldsByTags(tags) + if len(structCode.fields) > 0 { + fields = append(fields, field) + } + continue + } + } + if tags.ExistsKey(field.key) { + continue + } + fields = append(fields, field) + } + c.fields = fields +} + +func (c *StructCode) enableIndirect() { + if c.isIndirect { + return + } + c.isIndirect = true + if len(c.fields) == 0 { + return + } + structCode := c.fields[0].getStruct() + if structCode == nil { + return + } + structCode.enableIndirect() +} + +func (c *StructCode) Filter(query *FieldQuery) Code { + fieldMap := map[string]*FieldQuery{} + for _, field := range query.Fields { + fieldMap[field.Name] = field + } + fields := make([]*StructFieldCode, 0, len(c.fields)) + for _, field := range c.fields { + query, exists := fieldMap[field.key] + if !exists { + continue + } + fieldCode := &StructFieldCode{ + typ: field.typ, + key: field.key, + tag: field.tag, + value: field.value, + offset: field.offset, + isAnonymous: field.isAnonymous, + isTaggedKey: field.isTaggedKey, + isNilableType: field.isNilableType, + isNilCheck: field.isNilCheck, + isAddrForMarshaler: field.isAddrForMarshaler, + isNextOpPtrType: field.isNextOpPtrType, + } + if len(query.Fields) > 0 { + fieldCode.value = fieldCode.value.Filter(query) + } + fields = append(fields, fieldCode) + } + return &StructCode{ + typ: c.typ, + fields: fields, + isPtr: c.isPtr, + disableIndirectConversion: c.disableIndirectConversion, + isIndirect: c.isIndirect, + isRecursive: c.isRecursive, + } +} + +type StructFieldCode struct { + typ *runtime.Type + key string + tag *runtime.StructTag + value Code + offset uintptr + isAnonymous bool + isTaggedKey bool + isNilableType bool + isNilCheck bool + isAddrForMarshaler bool + isNextOpPtrType bool + isMarshalerContext bool +} + +func (c *StructFieldCode) getStruct() *StructCode { + value := c.value + ptr, ok := value.(*PtrCode) + if ok { + value = ptr.value + } + structCode, ok := value.(*StructCode) + if ok { + return structCode + } + return nil +} + +func (c *StructFieldCode) getAnonymousStruct() *StructCode { + if !c.isAnonymous { + return nil + } + return c.getStruct() +} + +func optimizeStructHeader(code *Opcode, tag *runtime.StructTag) OpType { + headType := code.ToHeaderType(tag.IsString) + if tag.IsOmitEmpty { + headType = headType.HeadToOmitEmptyHead() + } + return headType +} + +func optimizeStructField(code *Opcode, tag *runtime.StructTag) OpType { + fieldType := code.ToFieldType(tag.IsString) + if tag.IsOmitEmpty { + fieldType = fieldType.FieldToOmitEmptyField() + } + return fieldType +} + +func (c *StructFieldCode) headerOpcodes(ctx *compileContext, field *Opcode, valueCodes Opcodes) Opcodes { + value := valueCodes.First() + op := optimizeStructHeader(value, c.tag) + field.Op = op + if value.Flags&MarshalerContextFlags != 0 { + field.Flags |= MarshalerContextFlags + } + field.NumBitSize = value.NumBitSize + field.PtrNum = value.PtrNum + field.FieldQuery = value.FieldQuery + fieldCodes := Opcodes{field} + if op.IsMultipleOpHead() { + field.Next = value + fieldCodes = fieldCodes.Add(valueCodes...) 
+ } else { + ctx.decIndex() + } + return fieldCodes +} + +func (c *StructFieldCode) fieldOpcodes(ctx *compileContext, field *Opcode, valueCodes Opcodes) Opcodes { + value := valueCodes.First() + op := optimizeStructField(value, c.tag) + field.Op = op + if value.Flags&MarshalerContextFlags != 0 { + field.Flags |= MarshalerContextFlags + } + field.NumBitSize = value.NumBitSize + field.PtrNum = value.PtrNum + field.FieldQuery = value.FieldQuery + + fieldCodes := Opcodes{field} + if op.IsMultipleOpField() { + field.Next = value + fieldCodes = fieldCodes.Add(valueCodes...) + } else { + ctx.decIndex() + } + return fieldCodes +} + +func (c *StructFieldCode) addStructEndCode(ctx *compileContext, codes Opcodes) Opcodes { + end := &Opcode{ + Op: OpStructEnd, + Idx: opcodeOffset(ctx.ptrIndex), + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + } + codes.Last().Next = end + codes.First().NextField = end + codes = codes.Add(end) + ctx.incOpcodeIndex() + return codes +} + +func (c *StructFieldCode) structKey(ctx *compileContext) string { + if ctx.escapeKey { + rctx := &RuntimeContext{Option: &Option{Flag: HTMLEscapeOption}} + return fmt.Sprintf(`%s:`, string(AppendString(rctx, []byte{}, c.key))) + } + return fmt.Sprintf(`"%s":`, c.key) +} + +func (c *StructFieldCode) flags() OpFlags { + var flags OpFlags + if c.isTaggedKey { + flags |= IsTaggedKeyFlags + } + if c.isNilableType { + flags |= IsNilableTypeFlags + } + if c.isNilCheck { + flags |= NilCheckFlags + } + if c.isAddrForMarshaler { + flags |= AddrForMarshalerFlags + } + if c.isNextOpPtrType { + flags |= IsNextOpPtrTypeFlags + } + if c.isAnonymous { + flags |= AnonymousKeyFlags + } + if c.isMarshalerContext { + flags |= MarshalerContextFlags + } + return flags +} + +func (c *StructFieldCode) toValueOpcodes(ctx *compileContext) Opcodes { + if c.isAnonymous { + anonymCode, ok := c.value.(AnonymousCode) + if ok { + return anonymCode.ToAnonymousOpcode(ctx) + } + } + return c.value.ToOpcode(ctx) +} + +func (c *StructFieldCode) ToOpcode(ctx *compileContext, isFirstField, isEndField bool) Opcodes { + field := &Opcode{ + Idx: opcodeOffset(ctx.ptrIndex), + Flags: c.flags(), + Key: c.structKey(ctx), + Offset: uint32(c.offset), + Type: c.typ, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + DisplayKey: c.key, + } + ctx.incIndex() + valueCodes := c.toValueOpcodes(ctx) + if isFirstField { + codes := c.headerOpcodes(ctx, field, valueCodes) + if isEndField { + codes = c.addStructEndCode(ctx, codes) + } + return codes + } + codes := c.fieldOpcodes(ctx, field, valueCodes) + if isEndField { + if isEnableStructEndOptimization(c.value) { + field.Op = field.Op.FieldToEnd() + } else { + codes = c.addStructEndCode(ctx, codes) + } + } + return codes +} + +func (c *StructFieldCode) ToAnonymousOpcode(ctx *compileContext, isFirstField, isEndField bool) Opcodes { + field := &Opcode{ + Idx: opcodeOffset(ctx.ptrIndex), + Flags: c.flags() | AnonymousHeadFlags, + Key: c.structKey(ctx), + Offset: uint32(c.offset), + Type: c.typ, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + DisplayKey: c.key, + } + ctx.incIndex() + valueCodes := c.toValueOpcodes(ctx) + if isFirstField { + return c.headerOpcodes(ctx, field, valueCodes) + } + return c.fieldOpcodes(ctx, field, valueCodes) +} + +func isEnableStructEndOptimization(value Code) bool { + switch value.Kind() { + case CodeKindInt, + CodeKindUint, + CodeKindFloat, + CodeKindString, + CodeKindBool, + CodeKindBytes: + return true + case CodeKindPtr: + return isEnableStructEndOptimization(value.(*PtrCode).value) + default: + 
return false + } +} + +type InterfaceCode struct { + typ *runtime.Type + fieldQuery *FieldQuery + isPtr bool +} + +func (c *InterfaceCode) Kind() CodeKind { + return CodeKindInterface +} + +func (c *InterfaceCode) ToOpcode(ctx *compileContext) Opcodes { + var code *Opcode + switch { + case c.isPtr: + code = newOpCode(ctx, c.typ, OpInterfacePtr) + default: + code = newOpCode(ctx, c.typ, OpInterface) + } + code.FieldQuery = c.fieldQuery + if c.typ.NumMethod() > 0 { + code.Flags |= NonEmptyInterfaceFlags + } + ctx.incIndex() + return Opcodes{code} +} + +func (c *InterfaceCode) Filter(query *FieldQuery) Code { + return &InterfaceCode{ + typ: c.typ, + fieldQuery: query, + isPtr: c.isPtr, + } +} + +type MarshalJSONCode struct { + typ *runtime.Type + fieldQuery *FieldQuery + isAddrForMarshaler bool + isNilableType bool + isMarshalerContext bool +} + +func (c *MarshalJSONCode) Kind() CodeKind { + return CodeKindMarshalJSON +} + +func (c *MarshalJSONCode) ToOpcode(ctx *compileContext) Opcodes { + code := newOpCode(ctx, c.typ, OpMarshalJSON) + code.FieldQuery = c.fieldQuery + if c.isAddrForMarshaler { + code.Flags |= AddrForMarshalerFlags + } + if c.isMarshalerContext { + code.Flags |= MarshalerContextFlags + } + if c.isNilableType { + code.Flags |= IsNilableTypeFlags + } else { + code.Flags &= ^IsNilableTypeFlags + } + ctx.incIndex() + return Opcodes{code} +} + +func (c *MarshalJSONCode) Filter(query *FieldQuery) Code { + return &MarshalJSONCode{ + typ: c.typ, + fieldQuery: query, + isAddrForMarshaler: c.isAddrForMarshaler, + isNilableType: c.isNilableType, + isMarshalerContext: c.isMarshalerContext, + } +} + +type MarshalTextCode struct { + typ *runtime.Type + fieldQuery *FieldQuery + isAddrForMarshaler bool + isNilableType bool +} + +func (c *MarshalTextCode) Kind() CodeKind { + return CodeKindMarshalText +} + +func (c *MarshalTextCode) ToOpcode(ctx *compileContext) Opcodes { + code := newOpCode(ctx, c.typ, OpMarshalText) + code.FieldQuery = c.fieldQuery + if c.isAddrForMarshaler { + code.Flags |= AddrForMarshalerFlags + } + if c.isNilableType { + code.Flags |= IsNilableTypeFlags + } else { + code.Flags &= ^IsNilableTypeFlags + } + ctx.incIndex() + return Opcodes{code} +} + +func (c *MarshalTextCode) Filter(query *FieldQuery) Code { + return &MarshalTextCode{ + typ: c.typ, + fieldQuery: query, + isAddrForMarshaler: c.isAddrForMarshaler, + isNilableType: c.isNilableType, + } +} + +type PtrCode struct { + typ *runtime.Type + value Code + ptrNum uint8 +} + +func (c *PtrCode) Kind() CodeKind { + return CodeKindPtr +} + +func (c *PtrCode) ToOpcode(ctx *compileContext) Opcodes { + codes := c.value.ToOpcode(ctx) + codes.First().Op = convertPtrOp(codes.First()) + codes.First().PtrNum = c.ptrNum + return codes +} + +func (c *PtrCode) ToAnonymousOpcode(ctx *compileContext) Opcodes { + var codes Opcodes + anonymCode, ok := c.value.(AnonymousCode) + if ok { + codes = anonymCode.ToAnonymousOpcode(ctx) + } else { + codes = c.value.ToOpcode(ctx) + } + codes.First().Op = convertPtrOp(codes.First()) + codes.First().PtrNum = c.ptrNum + return codes +} + +func (c *PtrCode) Filter(query *FieldQuery) Code { + return &PtrCode{ + typ: c.typ, + value: c.value.Filter(query), + ptrNum: c.ptrNum, + } +} + +func convertPtrOp(code *Opcode) OpType { + ptrHeadOp := code.Op.HeadToPtrHead() + if code.Op != ptrHeadOp { + if code.PtrNum > 0 { + // ptr field and ptr head + code.PtrNum-- + } + return ptrHeadOp + } + switch code.Op { + case OpInt: + return OpIntPtr + case OpUint: + return OpUintPtr + case OpFloat32: + return 
OpFloat32Ptr + case OpFloat64: + return OpFloat64Ptr + case OpString: + return OpStringPtr + case OpBool: + return OpBoolPtr + case OpBytes: + return OpBytesPtr + case OpNumber: + return OpNumberPtr + case OpArray: + return OpArrayPtr + case OpSlice: + return OpSlicePtr + case OpMap: + return OpMapPtr + case OpMarshalJSON: + return OpMarshalJSONPtr + case OpMarshalText: + return OpMarshalTextPtr + case OpInterface: + return OpInterfacePtr + case OpRecursive: + return OpRecursivePtr + } + return code.Op +} + +func isEmbeddedStruct(field *StructFieldCode) bool { + if !field.isAnonymous { + return false + } + t := field.typ + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + return t.Kind() == reflect.Struct +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/compact.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compact.go new file mode 100644 index 00000000..0eb9545d --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compact.go @@ -0,0 +1,286 @@ +package encoder + +import ( + "bytes" + "fmt" + "strconv" + "unsafe" + + "github.com/goccy/go-json/internal/errors" +) + +var ( + isWhiteSpace = [256]bool{ + ' ': true, + '\n': true, + '\t': true, + '\r': true, + } + isHTMLEscapeChar = [256]bool{ + '<': true, + '>': true, + '&': true, + } + nul = byte('\000') +) + +func Compact(buf *bytes.Buffer, src []byte, escape bool) error { + if len(src) == 0 { + return errors.ErrUnexpectedEndOfJSON("", 0) + } + buf.Grow(len(src)) + dst := buf.Bytes() + + ctx := TakeRuntimeContext() + ctxBuf := ctx.Buf[:0] + ctxBuf = append(append(ctxBuf, src...), nul) + ctx.Buf = ctxBuf + + if err := compactAndWrite(buf, dst, ctxBuf, escape); err != nil { + ReleaseRuntimeContext(ctx) + return err + } + ReleaseRuntimeContext(ctx) + return nil +} + +func compactAndWrite(buf *bytes.Buffer, dst []byte, src []byte, escape bool) error { + dst, err := compact(dst, src, escape) + if err != nil { + return err + } + if _, err := buf.Write(dst); err != nil { + return err + } + return nil +} + +func compact(dst, src []byte, escape bool) ([]byte, error) { + buf, cursor, err := compactValue(dst, src, 0, escape) + if err != nil { + return nil, err + } + if err := validateEndBuf(src, cursor); err != nil { + return nil, err + } + return buf, nil +} + +func validateEndBuf(src []byte, cursor int64) error { + for { + switch src[cursor] { + case ' ', '\t', '\n', '\r': + cursor++ + continue + case nul: + return nil + } + return errors.ErrSyntax( + fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]), + cursor+1, + ) + } +} + +func skipWhiteSpace(buf []byte, cursor int64) int64 { +LOOP: + if isWhiteSpace[buf[cursor]] { + cursor++ + goto LOOP + } + return cursor +} + +func compactValue(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) { + for { + switch src[cursor] { + case ' ', '\t', '\n', '\r': + cursor++ + continue + case '{': + return compactObject(dst, src, cursor, escape) + case '}': + return nil, 0, errors.ErrSyntax("unexpected character '}'", cursor) + case '[': + return compactArray(dst, src, cursor, escape) + case ']': + return nil, 0, errors.ErrSyntax("unexpected character ']'", cursor) + case '"': + return compactString(dst, src, cursor, escape) + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return compactNumber(dst, src, cursor) + case 't': + return compactTrue(dst, src, cursor) + case 'f': + return compactFalse(dst, src, cursor) + case 'n': + return compactNull(dst, src, 
cursor) + default: + return nil, 0, errors.ErrSyntax(fmt.Sprintf("unexpected character '%c'", src[cursor]), cursor) + } + } +} + +func compactObject(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) { + if src[cursor] == '{' { + dst = append(dst, '{') + } else { + return nil, 0, errors.ErrExpected("expected { character for object value", cursor) + } + cursor = skipWhiteSpace(src, cursor+1) + if src[cursor] == '}' { + dst = append(dst, '}') + return dst, cursor + 1, nil + } + var err error + for { + cursor = skipWhiteSpace(src, cursor) + dst, cursor, err = compactString(dst, src, cursor, escape) + if err != nil { + return nil, 0, err + } + cursor = skipWhiteSpace(src, cursor) + if src[cursor] != ':' { + return nil, 0, errors.ErrExpected("colon after object key", cursor) + } + dst = append(dst, ':') + dst, cursor, err = compactValue(dst, src, cursor+1, escape) + if err != nil { + return nil, 0, err + } + cursor = skipWhiteSpace(src, cursor) + switch src[cursor] { + case '}': + dst = append(dst, '}') + cursor++ + return dst, cursor, nil + case ',': + dst = append(dst, ',') + default: + return nil, 0, errors.ErrExpected("comma after object value", cursor) + } + cursor++ + } +} + +func compactArray(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) { + if src[cursor] == '[' { + dst = append(dst, '[') + } else { + return nil, 0, errors.ErrExpected("expected [ character for array value", cursor) + } + cursor = skipWhiteSpace(src, cursor+1) + if src[cursor] == ']' { + dst = append(dst, ']') + return dst, cursor + 1, nil + } + var err error + for { + dst, cursor, err = compactValue(dst, src, cursor, escape) + if err != nil { + return nil, 0, err + } + cursor = skipWhiteSpace(src, cursor) + switch src[cursor] { + case ']': + dst = append(dst, ']') + cursor++ + return dst, cursor, nil + case ',': + dst = append(dst, ',') + default: + return nil, 0, errors.ErrExpected("comma after array value", cursor) + } + cursor++ + } +} + +func compactString(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) { + if src[cursor] != '"' { + return nil, 0, errors.ErrInvalidCharacter(src[cursor], "string", cursor) + } + start := cursor + for { + cursor++ + c := src[cursor] + if escape { + if isHTMLEscapeChar[c] { + dst = append(dst, src[start:cursor]...) + dst = append(dst, `\u00`...) + dst = append(dst, hex[c>>4], hex[c&0xF]) + start = cursor + 1 + } else if c == 0xE2 && cursor+2 < int64(len(src)) && src[cursor+1] == 0x80 && src[cursor+2]&^1 == 0xA8 { + dst = append(dst, src[start:cursor]...) + dst = append(dst, `\u202`...) + dst = append(dst, hex[src[cursor+2]&0xF]) + cursor += 2 + start = cursor + 3 + } + } + switch c { + case '\\': + cursor++ + if src[cursor] == nul { + return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src))) + } + case '"': + cursor++ + return append(dst, src[start:cursor]...), cursor, nil + case nul: + return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src))) + } + } +} + +func compactNumber(dst, src []byte, cursor int64) ([]byte, int64, error) { + start := cursor + for { + cursor++ + if floatTable[src[cursor]] { + continue + } + break + } + num := src[start:cursor] + if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&num)), 64); err != nil { + return nil, 0, err + } + dst = append(dst, num...) 
+ return dst, cursor, nil +} + +func compactTrue(dst, src []byte, cursor int64) ([]byte, int64, error) { + if cursor+3 >= int64(len(src)) { + return nil, 0, errors.ErrUnexpectedEndOfJSON("true", cursor) + } + if !bytes.Equal(src[cursor:cursor+4], []byte(`true`)) { + return nil, 0, errors.ErrInvalidCharacter(src[cursor], "true", cursor) + } + dst = append(dst, "true"...) + cursor += 4 + return dst, cursor, nil +} + +func compactFalse(dst, src []byte, cursor int64) ([]byte, int64, error) { + if cursor+4 >= int64(len(src)) { + return nil, 0, errors.ErrUnexpectedEndOfJSON("false", cursor) + } + if !bytes.Equal(src[cursor:cursor+5], []byte(`false`)) { + return nil, 0, errors.ErrInvalidCharacter(src[cursor], "false", cursor) + } + dst = append(dst, "false"...) + cursor += 5 + return dst, cursor, nil +} + +func compactNull(dst, src []byte, cursor int64) ([]byte, int64, error) { + if cursor+3 >= int64(len(src)) { + return nil, 0, errors.ErrUnexpectedEndOfJSON("null", cursor) + } + if !bytes.Equal(src[cursor:cursor+4], []byte(`null`)) { + return nil, 0, errors.ErrInvalidCharacter(src[cursor], "null", cursor) + } + dst = append(dst, "null"...) + cursor += 4 + return dst, cursor, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler.go new file mode 100644 index 00000000..bf5e0f94 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler.go @@ -0,0 +1,930 @@ +package encoder + +import ( + "context" + "encoding" + "encoding/json" + "reflect" + "sync/atomic" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +type marshalerContext interface { + MarshalJSON(context.Context) ([]byte, error) +} + +var ( + marshalJSONType = reflect.TypeOf((*json.Marshaler)(nil)).Elem() + marshalJSONContextType = reflect.TypeOf((*marshalerContext)(nil)).Elem() + marshalTextType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() + jsonNumberType = reflect.TypeOf(json.Number("")) + cachedOpcodeSets []*OpcodeSet + cachedOpcodeMap unsafe.Pointer // map[uintptr]*OpcodeSet + typeAddr *runtime.TypeAddr +) + +func init() { + typeAddr = runtime.AnalyzeTypeAddr() + if typeAddr == nil { + typeAddr = &runtime.TypeAddr{} + } + cachedOpcodeSets = make([]*OpcodeSet, typeAddr.AddrRange>>typeAddr.AddrShift+1) +} + +func loadOpcodeMap() map[uintptr]*OpcodeSet { + p := atomic.LoadPointer(&cachedOpcodeMap) + return *(*map[uintptr]*OpcodeSet)(unsafe.Pointer(&p)) +} + +func storeOpcodeSet(typ uintptr, set *OpcodeSet, m map[uintptr]*OpcodeSet) { + newOpcodeMap := make(map[uintptr]*OpcodeSet, len(m)+1) + newOpcodeMap[typ] = set + + for k, v := range m { + newOpcodeMap[k] = v + } + + atomic.StorePointer(&cachedOpcodeMap, *(*unsafe.Pointer)(unsafe.Pointer(&newOpcodeMap))) +} + +func compileToGetCodeSetSlowPath(typeptr uintptr) (*OpcodeSet, error) { + opcodeMap := loadOpcodeMap() + if codeSet, exists := opcodeMap[typeptr]; exists { + return codeSet, nil + } + codeSet, err := newCompiler().compile(typeptr) + if err != nil { + return nil, err + } + storeOpcodeSet(typeptr, codeSet, opcodeMap) + return codeSet, nil +} + +func getFilteredCodeSetIfNeeded(ctx *RuntimeContext, codeSet *OpcodeSet) (*OpcodeSet, error) { + if (ctx.Option.Flag & ContextOption) == 0 { + return codeSet, nil + } + query := FieldQueryFromContext(ctx.Option.Context) + if query == nil { + return codeSet, nil + } + ctx.Option.Flag |= 
FieldQueryOption + cacheCodeSet := codeSet.getQueryCache(query.Hash()) + if cacheCodeSet != nil { + return cacheCodeSet, nil + } + queryCodeSet, err := newCompiler().codeToOpcodeSet(codeSet.Type, codeSet.Code.Filter(query)) + if err != nil { + return nil, err + } + codeSet.setQueryCache(query.Hash(), queryCodeSet) + return queryCodeSet, nil +} + +type Compiler struct { + structTypeToCode map[uintptr]*StructCode +} + +func newCompiler() *Compiler { + return &Compiler{ + structTypeToCode: map[uintptr]*StructCode{}, + } +} + +func (c *Compiler) compile(typeptr uintptr) (*OpcodeSet, error) { + // noescape trick for header.typ ( reflect.*rtype ) + typ := *(**runtime.Type)(unsafe.Pointer(&typeptr)) + code, err := c.typeToCode(typ) + if err != nil { + return nil, err + } + return c.codeToOpcodeSet(typ, code) +} + +func (c *Compiler) codeToOpcodeSet(typ *runtime.Type, code Code) (*OpcodeSet, error) { + noescapeKeyCode := c.codeToOpcode(&compileContext{ + structTypeToCodes: map[uintptr]Opcodes{}, + recursiveCodes: &Opcodes{}, + }, typ, code) + if err := noescapeKeyCode.Validate(); err != nil { + return nil, err + } + escapeKeyCode := c.codeToOpcode(&compileContext{ + structTypeToCodes: map[uintptr]Opcodes{}, + recursiveCodes: &Opcodes{}, + escapeKey: true, + }, typ, code) + noescapeKeyCode = copyOpcode(noescapeKeyCode) + escapeKeyCode = copyOpcode(escapeKeyCode) + setTotalLengthToInterfaceOp(noescapeKeyCode) + setTotalLengthToInterfaceOp(escapeKeyCode) + interfaceNoescapeKeyCode := copyToInterfaceOpcode(noescapeKeyCode) + interfaceEscapeKeyCode := copyToInterfaceOpcode(escapeKeyCode) + codeLength := noescapeKeyCode.TotalLength() + return &OpcodeSet{ + Type: typ, + NoescapeKeyCode: noescapeKeyCode, + EscapeKeyCode: escapeKeyCode, + InterfaceNoescapeKeyCode: interfaceNoescapeKeyCode, + InterfaceEscapeKeyCode: interfaceEscapeKeyCode, + CodeLength: codeLength, + EndCode: ToEndCode(interfaceNoescapeKeyCode), + Code: code, + QueryCache: map[string]*OpcodeSet{}, + }, nil +} + +func (c *Compiler) typeToCode(typ *runtime.Type) (Code, error) { + switch { + case c.implementsMarshalJSON(typ): + return c.marshalJSONCode(typ) + case c.implementsMarshalText(typ): + return c.marshalTextCode(typ) + } + + isPtr := false + orgType := typ + if typ.Kind() == reflect.Ptr { + typ = typ.Elem() + isPtr = true + } + switch { + case c.implementsMarshalJSON(typ): + return c.marshalJSONCode(orgType) + case c.implementsMarshalText(typ): + return c.marshalTextCode(orgType) + } + switch typ.Kind() { + case reflect.Slice: + elem := typ.Elem() + if elem.Kind() == reflect.Uint8 { + p := runtime.PtrTo(elem) + if !c.implementsMarshalJSONType(p) && !p.Implements(marshalTextType) { + return c.bytesCode(typ, isPtr) + } + } + return c.sliceCode(typ) + case reflect.Map: + if isPtr { + return c.ptrCode(runtime.PtrTo(typ)) + } + return c.mapCode(typ) + case reflect.Struct: + return c.structCode(typ, isPtr) + case reflect.Int: + return c.intCode(typ, isPtr) + case reflect.Int8: + return c.int8Code(typ, isPtr) + case reflect.Int16: + return c.int16Code(typ, isPtr) + case reflect.Int32: + return c.int32Code(typ, isPtr) + case reflect.Int64: + return c.int64Code(typ, isPtr) + case reflect.Uint, reflect.Uintptr: + return c.uintCode(typ, isPtr) + case reflect.Uint8: + return c.uint8Code(typ, isPtr) + case reflect.Uint16: + return c.uint16Code(typ, isPtr) + case reflect.Uint32: + return c.uint32Code(typ, isPtr) + case reflect.Uint64: + return c.uint64Code(typ, isPtr) + case reflect.Float32: + return c.float32Code(typ, isPtr) + case reflect.Float64: 
+ return c.float64Code(typ, isPtr) + case reflect.String: + return c.stringCode(typ, isPtr) + case reflect.Bool: + return c.boolCode(typ, isPtr) + case reflect.Interface: + return c.interfaceCode(typ, isPtr) + default: + if isPtr && typ.Implements(marshalTextType) { + typ = orgType + } + return c.typeToCodeWithPtr(typ, isPtr) + } +} + +func (c *Compiler) typeToCodeWithPtr(typ *runtime.Type, isPtr bool) (Code, error) { + switch { + case c.implementsMarshalJSON(typ): + return c.marshalJSONCode(typ) + case c.implementsMarshalText(typ): + return c.marshalTextCode(typ) + } + switch typ.Kind() { + case reflect.Ptr: + return c.ptrCode(typ) + case reflect.Slice: + elem := typ.Elem() + if elem.Kind() == reflect.Uint8 { + p := runtime.PtrTo(elem) + if !c.implementsMarshalJSONType(p) && !p.Implements(marshalTextType) { + return c.bytesCode(typ, false) + } + } + return c.sliceCode(typ) + case reflect.Array: + return c.arrayCode(typ) + case reflect.Map: + return c.mapCode(typ) + case reflect.Struct: + return c.structCode(typ, isPtr) + case reflect.Interface: + return c.interfaceCode(typ, false) + case reflect.Int: + return c.intCode(typ, false) + case reflect.Int8: + return c.int8Code(typ, false) + case reflect.Int16: + return c.int16Code(typ, false) + case reflect.Int32: + return c.int32Code(typ, false) + case reflect.Int64: + return c.int64Code(typ, false) + case reflect.Uint: + return c.uintCode(typ, false) + case reflect.Uint8: + return c.uint8Code(typ, false) + case reflect.Uint16: + return c.uint16Code(typ, false) + case reflect.Uint32: + return c.uint32Code(typ, false) + case reflect.Uint64: + return c.uint64Code(typ, false) + case reflect.Uintptr: + return c.uintCode(typ, false) + case reflect.Float32: + return c.float32Code(typ, false) + case reflect.Float64: + return c.float64Code(typ, false) + case reflect.String: + return c.stringCode(typ, false) + case reflect.Bool: + return c.boolCode(typ, false) + } + return nil, &errors.UnsupportedTypeError{Type: runtime.RType2Type(typ)} +} + +const intSize = 32 << (^uint(0) >> 63) + +//nolint:unparam +func (c *Compiler) intCode(typ *runtime.Type, isPtr bool) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: intSize, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) int8Code(typ *runtime.Type, isPtr bool) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 8, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) int16Code(typ *runtime.Type, isPtr bool) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 16, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) int32Code(typ *runtime.Type, isPtr bool) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 32, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) int64Code(typ *runtime.Type, isPtr bool) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 64, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) uintCode(typ *runtime.Type, isPtr bool) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: intSize, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) uint8Code(typ *runtime.Type, isPtr bool) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 8, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) uint16Code(typ *runtime.Type, isPtr bool) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 16, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) uint32Code(typ *runtime.Type, isPtr bool) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 32, isPtr: isPtr}, nil +} + 
+//nolint:unparam +func (c *Compiler) uint64Code(typ *runtime.Type, isPtr bool) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 64, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) float32Code(typ *runtime.Type, isPtr bool) (*FloatCode, error) { + return &FloatCode{typ: typ, bitSize: 32, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) float64Code(typ *runtime.Type, isPtr bool) (*FloatCode, error) { + return &FloatCode{typ: typ, bitSize: 64, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) stringCode(typ *runtime.Type, isPtr bool) (*StringCode, error) { + return &StringCode{typ: typ, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) boolCode(typ *runtime.Type, isPtr bool) (*BoolCode, error) { + return &BoolCode{typ: typ, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) intStringCode(typ *runtime.Type) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: intSize, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) int8StringCode(typ *runtime.Type) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 8, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) int16StringCode(typ *runtime.Type) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 16, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) int32StringCode(typ *runtime.Type) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 32, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) int64StringCode(typ *runtime.Type) (*IntCode, error) { + return &IntCode{typ: typ, bitSize: 64, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) uintStringCode(typ *runtime.Type) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: intSize, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) uint8StringCode(typ *runtime.Type) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 8, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) uint16StringCode(typ *runtime.Type) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 16, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) uint32StringCode(typ *runtime.Type) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 32, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) uint64StringCode(typ *runtime.Type) (*UintCode, error) { + return &UintCode{typ: typ, bitSize: 64, isString: true}, nil +} + +//nolint:unparam +func (c *Compiler) bytesCode(typ *runtime.Type, isPtr bool) (*BytesCode, error) { + return &BytesCode{typ: typ, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) interfaceCode(typ *runtime.Type, isPtr bool) (*InterfaceCode, error) { + return &InterfaceCode{typ: typ, isPtr: isPtr}, nil +} + +//nolint:unparam +func (c *Compiler) marshalJSONCode(typ *runtime.Type) (*MarshalJSONCode, error) { + return &MarshalJSONCode{ + typ: typ, + isAddrForMarshaler: c.isPtrMarshalJSONType(typ), + isNilableType: c.isNilableType(typ), + isMarshalerContext: typ.Implements(marshalJSONContextType) || runtime.PtrTo(typ).Implements(marshalJSONContextType), + }, nil +} + +//nolint:unparam +func (c *Compiler) marshalTextCode(typ *runtime.Type) (*MarshalTextCode, error) { + return &MarshalTextCode{ + typ: typ, + isAddrForMarshaler: c.isPtrMarshalTextType(typ), + isNilableType: c.isNilableType(typ), + }, nil +} + +func (c *Compiler) ptrCode(typ *runtime.Type) (*PtrCode, error) { + code, err := c.typeToCodeWithPtr(typ.Elem(), true) + if err != nil { + return nil, err + } + ptr, ok := code.(*PtrCode) + 
if ok { + return &PtrCode{typ: typ, value: ptr.value, ptrNum: ptr.ptrNum + 1}, nil + } + return &PtrCode{typ: typ, value: code, ptrNum: 1}, nil +} + +func (c *Compiler) sliceCode(typ *runtime.Type) (*SliceCode, error) { + elem := typ.Elem() + code, err := c.listElemCode(elem) + if err != nil { + return nil, err + } + if code.Kind() == CodeKindStruct { + structCode := code.(*StructCode) + structCode.enableIndirect() + } + return &SliceCode{typ: typ, value: code}, nil +} + +func (c *Compiler) arrayCode(typ *runtime.Type) (*ArrayCode, error) { + elem := typ.Elem() + code, err := c.listElemCode(elem) + if err != nil { + return nil, err + } + if code.Kind() == CodeKindStruct { + structCode := code.(*StructCode) + structCode.enableIndirect() + } + return &ArrayCode{typ: typ, value: code}, nil +} + +func (c *Compiler) mapCode(typ *runtime.Type) (*MapCode, error) { + keyCode, err := c.mapKeyCode(typ.Key()) + if err != nil { + return nil, err + } + valueCode, err := c.mapValueCode(typ.Elem()) + if err != nil { + return nil, err + } + if valueCode.Kind() == CodeKindStruct { + structCode := valueCode.(*StructCode) + structCode.enableIndirect() + } + return &MapCode{typ: typ, key: keyCode, value: valueCode}, nil +} + +func (c *Compiler) listElemCode(typ *runtime.Type) (Code, error) { + switch { + case c.isPtrMarshalJSONType(typ): + return c.marshalJSONCode(typ) + case !typ.Implements(marshalTextType) && runtime.PtrTo(typ).Implements(marshalTextType): + return c.marshalTextCode(typ) + case typ.Kind() == reflect.Map: + return c.ptrCode(runtime.PtrTo(typ)) + default: + // isPtr was originally used to indicate whether the type of top level is pointer. + // However, since the slice/array element is a specification that can get the pointer address, explicitly set isPtr to true. 
+ // See here for related issues: https://github.com/goccy/go-json/issues/370 + code, err := c.typeToCodeWithPtr(typ, true) + if err != nil { + return nil, err + } + ptr, ok := code.(*PtrCode) + if ok { + if ptr.value.Kind() == CodeKindMap { + ptr.ptrNum++ + } + } + return code, nil + } +} + +func (c *Compiler) mapKeyCode(typ *runtime.Type) (Code, error) { + switch { + case c.implementsMarshalJSON(typ): + return c.marshalJSONCode(typ) + case c.implementsMarshalText(typ): + return c.marshalTextCode(typ) + } + switch typ.Kind() { + case reflect.Ptr: + return c.ptrCode(typ) + case reflect.String: + return c.stringCode(typ, false) + case reflect.Int: + return c.intStringCode(typ) + case reflect.Int8: + return c.int8StringCode(typ) + case reflect.Int16: + return c.int16StringCode(typ) + case reflect.Int32: + return c.int32StringCode(typ) + case reflect.Int64: + return c.int64StringCode(typ) + case reflect.Uint: + return c.uintStringCode(typ) + case reflect.Uint8: + return c.uint8StringCode(typ) + case reflect.Uint16: + return c.uint16StringCode(typ) + case reflect.Uint32: + return c.uint32StringCode(typ) + case reflect.Uint64: + return c.uint64StringCode(typ) + case reflect.Uintptr: + return c.uintStringCode(typ) + } + return nil, &errors.UnsupportedTypeError{Type: runtime.RType2Type(typ)} +} + +func (c *Compiler) mapValueCode(typ *runtime.Type) (Code, error) { + switch typ.Kind() { + case reflect.Map: + return c.ptrCode(runtime.PtrTo(typ)) + default: + code, err := c.typeToCodeWithPtr(typ, false) + if err != nil { + return nil, err + } + ptr, ok := code.(*PtrCode) + if ok { + if ptr.value.Kind() == CodeKindMap { + ptr.ptrNum++ + } + } + return code, nil + } +} + +func (c *Compiler) structCode(typ *runtime.Type, isPtr bool) (*StructCode, error) { + typeptr := uintptr(unsafe.Pointer(typ)) + if code, exists := c.structTypeToCode[typeptr]; exists { + derefCode := *code + derefCode.isRecursive = true + return &derefCode, nil + } + indirect := runtime.IfaceIndir(typ) + code := &StructCode{typ: typ, isPtr: isPtr, isIndirect: indirect} + c.structTypeToCode[typeptr] = code + + fieldNum := typ.NumField() + tags := c.typeToStructTags(typ) + fields := []*StructFieldCode{} + for i, tag := range tags { + isOnlyOneFirstField := i == 0 && fieldNum == 1 + field, err := c.structFieldCode(code, tag, isPtr, isOnlyOneFirstField) + if err != nil { + return nil, err + } + if field.isAnonymous { + structCode := field.getAnonymousStruct() + if structCode != nil { + structCode.removeFieldsByTags(tags) + if c.isAssignableIndirect(field, isPtr) { + if indirect { + structCode.isIndirect = true + } else { + structCode.isIndirect = false + } + } + } + } else { + structCode := field.getStruct() + if structCode != nil { + if indirect { + // if parent is indirect type, set child indirect property to true + structCode.isIndirect = true + } else { + // if parent is not indirect type, set child indirect property to false. + // but if parent's indirect is false and isPtr is true, then indirect must be true. + // Do this only if indirectConversion is enabled at the end of compileStruct. 
+ structCode.isIndirect = false + } + } + } + fields = append(fields, field) + } + fieldMap := c.getFieldMap(fields) + duplicatedFieldMap := c.getDuplicatedFieldMap(fieldMap) + code.fields = c.filteredDuplicatedFields(fields, duplicatedFieldMap) + if !code.disableIndirectConversion && !indirect && isPtr { + code.enableIndirect() + } + delete(c.structTypeToCode, typeptr) + return code, nil +} + +func (c *Compiler) structFieldCode(structCode *StructCode, tag *runtime.StructTag, isPtr, isOnlyOneFirstField bool) (*StructFieldCode, error) { + field := tag.Field + fieldType := runtime.Type2RType(field.Type) + isIndirectSpecialCase := isPtr && isOnlyOneFirstField + fieldCode := &StructFieldCode{ + typ: fieldType, + key: tag.Key, + tag: tag, + offset: field.Offset, + isAnonymous: field.Anonymous && !tag.IsTaggedKey, + isTaggedKey: tag.IsTaggedKey, + isNilableType: c.isNilableType(fieldType), + isNilCheck: true, + } + switch { + case c.isMovePointerPositionFromHeadToFirstMarshalJSONFieldCase(fieldType, isIndirectSpecialCase): + code, err := c.marshalJSONCode(fieldType) + if err != nil { + return nil, err + } + fieldCode.value = code + fieldCode.isAddrForMarshaler = true + fieldCode.isNilCheck = false + structCode.isIndirect = false + structCode.disableIndirectConversion = true + case c.isMovePointerPositionFromHeadToFirstMarshalTextFieldCase(fieldType, isIndirectSpecialCase): + code, err := c.marshalTextCode(fieldType) + if err != nil { + return nil, err + } + fieldCode.value = code + fieldCode.isAddrForMarshaler = true + fieldCode.isNilCheck = false + structCode.isIndirect = false + structCode.disableIndirectConversion = true + case isPtr && c.isPtrMarshalJSONType(fieldType): + // *struct{ field T } + // func (*T) MarshalJSON() ([]byte, error) + code, err := c.marshalJSONCode(fieldType) + if err != nil { + return nil, err + } + fieldCode.value = code + fieldCode.isAddrForMarshaler = true + fieldCode.isNilCheck = false + case isPtr && c.isPtrMarshalTextType(fieldType): + // *struct{ field T } + // func (*T) MarshalText() ([]byte, error) + code, err := c.marshalTextCode(fieldType) + if err != nil { + return nil, err + } + fieldCode.value = code + fieldCode.isAddrForMarshaler = true + fieldCode.isNilCheck = false + default: + code, err := c.typeToCodeWithPtr(fieldType, isPtr) + if err != nil { + return nil, err + } + switch code.Kind() { + case CodeKindPtr, CodeKindInterface: + fieldCode.isNextOpPtrType = true + } + fieldCode.value = code + } + return fieldCode, nil +} + +func (c *Compiler) isAssignableIndirect(fieldCode *StructFieldCode, isPtr bool) bool { + if isPtr { + return false + } + codeType := fieldCode.value.Kind() + if codeType == CodeKindMarshalJSON { + return false + } + if codeType == CodeKindMarshalText { + return false + } + return true +} + +func (c *Compiler) getFieldMap(fields []*StructFieldCode) map[string][]*StructFieldCode { + fieldMap := map[string][]*StructFieldCode{} + for _, field := range fields { + if field.isAnonymous { + for k, v := range c.getAnonymousFieldMap(field) { + fieldMap[k] = append(fieldMap[k], v...) 
+ } + continue + } + fieldMap[field.key] = append(fieldMap[field.key], field) + } + return fieldMap +} + +func (c *Compiler) getAnonymousFieldMap(field *StructFieldCode) map[string][]*StructFieldCode { + fieldMap := map[string][]*StructFieldCode{} + structCode := field.getAnonymousStruct() + if structCode == nil || structCode.isRecursive { + fieldMap[field.key] = append(fieldMap[field.key], field) + return fieldMap + } + for k, v := range c.getFieldMapFromAnonymousParent(structCode.fields) { + fieldMap[k] = append(fieldMap[k], v...) + } + return fieldMap +} + +func (c *Compiler) getFieldMapFromAnonymousParent(fields []*StructFieldCode) map[string][]*StructFieldCode { + fieldMap := map[string][]*StructFieldCode{} + for _, field := range fields { + if field.isAnonymous { + for k, v := range c.getAnonymousFieldMap(field) { + // Do not handle tagged key when embedding more than once + for _, vv := range v { + vv.isTaggedKey = false + } + fieldMap[k] = append(fieldMap[k], v...) + } + continue + } + fieldMap[field.key] = append(fieldMap[field.key], field) + } + return fieldMap +} + +func (c *Compiler) getDuplicatedFieldMap(fieldMap map[string][]*StructFieldCode) map[*StructFieldCode]struct{} { + duplicatedFieldMap := map[*StructFieldCode]struct{}{} + for _, fields := range fieldMap { + if len(fields) == 1 { + continue + } + if c.isTaggedKeyOnly(fields) { + for _, field := range fields { + if field.isTaggedKey { + continue + } + duplicatedFieldMap[field] = struct{}{} + } + } else { + for _, field := range fields { + duplicatedFieldMap[field] = struct{}{} + } + } + } + return duplicatedFieldMap +} + +func (c *Compiler) filteredDuplicatedFields(fields []*StructFieldCode, duplicatedFieldMap map[*StructFieldCode]struct{}) []*StructFieldCode { + filteredFields := make([]*StructFieldCode, 0, len(fields)) + for _, field := range fields { + if field.isAnonymous { + structCode := field.getAnonymousStruct() + if structCode != nil && !structCode.isRecursive { + structCode.fields = c.filteredDuplicatedFields(structCode.fields, duplicatedFieldMap) + if len(structCode.fields) > 0 { + filteredFields = append(filteredFields, field) + } + continue + } + } + if _, exists := duplicatedFieldMap[field]; exists { + continue + } + filteredFields = append(filteredFields, field) + } + return filteredFields +} + +func (c *Compiler) isTaggedKeyOnly(fields []*StructFieldCode) bool { + var taggedKeyFieldCount int + for _, field := range fields { + if field.isTaggedKey { + taggedKeyFieldCount++ + } + } + return taggedKeyFieldCount == 1 +} + +func (c *Compiler) typeToStructTags(typ *runtime.Type) runtime.StructTags { + tags := runtime.StructTags{} + fieldNum := typ.NumField() + for i := 0; i < fieldNum; i++ { + field := typ.Field(i) + if runtime.IsIgnoredStructField(field) { + continue + } + tags = append(tags, runtime.StructTagFromField(field)) + } + return tags +} + +// *struct{ field T } => struct { field *T } +// func (*T) MarshalJSON() ([]byte, error) +func (c *Compiler) isMovePointerPositionFromHeadToFirstMarshalJSONFieldCase(typ *runtime.Type, isIndirectSpecialCase bool) bool { + return isIndirectSpecialCase && !c.isNilableType(typ) && c.isPtrMarshalJSONType(typ) +} + +// *struct{ field T } => struct { field *T } +// func (*T) MarshalText() ([]byte, error) +func (c *Compiler) isMovePointerPositionFromHeadToFirstMarshalTextFieldCase(typ *runtime.Type, isIndirectSpecialCase bool) bool { + return isIndirectSpecialCase && !c.isNilableType(typ) && c.isPtrMarshalTextType(typ) +} + +func (c *Compiler) implementsMarshalJSON(typ 
*runtime.Type) bool { + if !c.implementsMarshalJSONType(typ) { + return false + } + if typ.Kind() != reflect.Ptr { + return true + } + // type kind is reflect.Ptr + if !c.implementsMarshalJSONType(typ.Elem()) { + return true + } + // needs to dereference + return false +} + +func (c *Compiler) implementsMarshalText(typ *runtime.Type) bool { + if !typ.Implements(marshalTextType) { + return false + } + if typ.Kind() != reflect.Ptr { + return true + } + // type kind is reflect.Ptr + if !typ.Elem().Implements(marshalTextType) { + return true + } + // needs to dereference + return false +} + +func (c *Compiler) isNilableType(typ *runtime.Type) bool { + if !runtime.IfaceIndir(typ) { + return true + } + switch typ.Kind() { + case reflect.Ptr: + return true + case reflect.Map: + return true + case reflect.Func: + return true + default: + return false + } +} + +func (c *Compiler) implementsMarshalJSONType(typ *runtime.Type) bool { + return typ.Implements(marshalJSONType) || typ.Implements(marshalJSONContextType) +} + +func (c *Compiler) isPtrMarshalJSONType(typ *runtime.Type) bool { + return !c.implementsMarshalJSONType(typ) && c.implementsMarshalJSONType(runtime.PtrTo(typ)) +} + +func (c *Compiler) isPtrMarshalTextType(typ *runtime.Type) bool { + return !typ.Implements(marshalTextType) && runtime.PtrTo(typ).Implements(marshalTextType) +} + +func (c *Compiler) codeToOpcode(ctx *compileContext, typ *runtime.Type, code Code) *Opcode { + codes := code.ToOpcode(ctx) + codes.Last().Next = newEndOp(ctx, typ) + c.linkRecursiveCode(ctx) + return codes.First() +} + +func (c *Compiler) linkRecursiveCode(ctx *compileContext) { + recursiveCodes := map[uintptr]*CompiledCode{} + for _, recursive := range *ctx.recursiveCodes { + typeptr := uintptr(unsafe.Pointer(recursive.Type)) + codes := ctx.structTypeToCodes[typeptr] + if recursiveCode, ok := recursiveCodes[typeptr]; ok { + *recursive.Jmp = *recursiveCode + continue + } + + code := copyOpcode(codes.First()) + code.Op = code.Op.PtrHeadToHead() + lastCode := newEndOp(&compileContext{}, recursive.Type) + lastCode.Op = OpRecursiveEnd + + // OpRecursiveEnd must set before call TotalLength + code.End.Next = lastCode + + totalLength := code.TotalLength() + + // Idx, ElemIdx, Length must set after call TotalLength + lastCode.Idx = uint32((totalLength + 1) * uintptrSize) + lastCode.ElemIdx = lastCode.Idx + uintptrSize + lastCode.Length = lastCode.Idx + 2*uintptrSize + + // extend length to alloc slot for elemIdx + length + curTotalLength := uintptr(recursive.TotalLength()) + 3 + nextTotalLength := uintptr(totalLength) + 3 + + compiled := recursive.Jmp + compiled.Code = code + compiled.CurLen = curTotalLength + compiled.NextLen = nextTotalLength + compiled.Linked = true + + recursiveCodes[typeptr] = compiled + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_norace.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_norace.go new file mode 100644 index 00000000..20c93cbf --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_norace.go @@ -0,0 +1,32 @@ +//go:build !race +// +build !race + +package encoder + +func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) { + if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr { + codeSet, err := compileToGetCodeSetSlowPath(typeptr) + if err != nil { + return nil, err + } + return getFilteredCodeSetIfNeeded(ctx, codeSet) + } + index := (typeptr - 
typeAddr.BaseTypeAddr) >> typeAddr.AddrShift + if codeSet := cachedOpcodeSets[index]; codeSet != nil { + filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet) + if err != nil { + return nil, err + } + return filtered, nil + } + codeSet, err := newCompiler().compile(typeptr) + if err != nil { + return nil, err + } + filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet) + if err != nil { + return nil, err + } + cachedOpcodeSets[index] = codeSet + return filtered, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_race.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_race.go new file mode 100644 index 00000000..13ba23fd --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/compiler_race.go @@ -0,0 +1,45 @@ +//go:build race +// +build race + +package encoder + +import ( + "sync" +) + +var setsMu sync.RWMutex + +func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) { + if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr { + codeSet, err := compileToGetCodeSetSlowPath(typeptr) + if err != nil { + return nil, err + } + return getFilteredCodeSetIfNeeded(ctx, codeSet) + } + index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift + setsMu.RLock() + if codeSet := cachedOpcodeSets[index]; codeSet != nil { + filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet) + if err != nil { + setsMu.RUnlock() + return nil, err + } + setsMu.RUnlock() + return filtered, nil + } + setsMu.RUnlock() + + codeSet, err := newCompiler().compile(typeptr) + if err != nil { + return nil, err + } + filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet) + if err != nil { + return nil, err + } + setsMu.Lock() + cachedOpcodeSets[index] = codeSet + setsMu.Unlock() + return filtered, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/context.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/context.go new file mode 100644 index 00000000..3833d0c8 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/context.go @@ -0,0 +1,105 @@ +package encoder + +import ( + "context" + "sync" + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +type compileContext struct { + opcodeIndex uint32 + ptrIndex int + indent uint32 + escapeKey bool + structTypeToCodes map[uintptr]Opcodes + recursiveCodes *Opcodes +} + +func (c *compileContext) incIndent() { + c.indent++ +} + +func (c *compileContext) decIndent() { + c.indent-- +} + +func (c *compileContext) incIndex() { + c.incOpcodeIndex() + c.incPtrIndex() +} + +func (c *compileContext) decIndex() { + c.decOpcodeIndex() + c.decPtrIndex() +} + +func (c *compileContext) incOpcodeIndex() { + c.opcodeIndex++ +} + +func (c *compileContext) decOpcodeIndex() { + c.opcodeIndex-- +} + +func (c *compileContext) incPtrIndex() { + c.ptrIndex++ +} + +func (c *compileContext) decPtrIndex() { + c.ptrIndex-- +} + +const ( + bufSize = 1024 +) + +var ( + runtimeContextPool = sync.Pool{ + New: func() interface{} { + return &RuntimeContext{ + Buf: make([]byte, 0, bufSize), + Ptrs: make([]uintptr, 128), + KeepRefs: make([]unsafe.Pointer, 0, 8), + Option: &Option{}, + } + }, + } +) + +type RuntimeContext struct { + Context context.Context + Buf []byte + MarshalBuf []byte + Ptrs []uintptr + KeepRefs []unsafe.Pointer + SeenPtr []uintptr + BaseIndent uint32 + Prefix []byte + IndentStr []byte + Option *Option +} + +func (c 
*RuntimeContext) Init(p uintptr, codelen int) { + if len(c.Ptrs) < codelen { + c.Ptrs = make([]uintptr, codelen) + } + c.Ptrs[0] = p + c.KeepRefs = c.KeepRefs[:0] + c.SeenPtr = c.SeenPtr[:0] + c.BaseIndent = 0 +} + +func (c *RuntimeContext) Ptr() uintptr { + header := (*runtime.SliceHeader)(unsafe.Pointer(&c.Ptrs)) + return uintptr(header.Data) +} + +func TakeRuntimeContext() *RuntimeContext { + return runtimeContextPool.Get().(*RuntimeContext) +} + +func ReleaseRuntimeContext(ctx *RuntimeContext) { + runtimeContextPool.Put(ctx) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/decode_rune.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/decode_rune.go new file mode 100644 index 00000000..35c959d4 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/decode_rune.go @@ -0,0 +1,126 @@ +package encoder + +import "unicode/utf8" + +const ( + // The default lowest and highest continuation byte. + locb = 128 //0b10000000 + hicb = 191 //0b10111111 + + // These names of these constants are chosen to give nice alignment in the + // table below. The first nibble is an index into acceptRanges or F for + // special one-byte cases. The second nibble is the Rune length or the + // Status for the special one-byte case. + xx = 0xF1 // invalid: size 1 + as = 0xF0 // ASCII: size 1 + s1 = 0x02 // accept 0, size 2 + s2 = 0x13 // accept 1, size 3 + s3 = 0x03 // accept 0, size 3 + s4 = 0x23 // accept 2, size 3 + s5 = 0x34 // accept 3, size 4 + s6 = 0x04 // accept 0, size 4 + s7 = 0x44 // accept 4, size 4 +) + +// first is information about the first byte in a UTF-8 sequence. +var first = [256]uint8{ + // 1 2 3 4 5 6 7 8 9 A B C D E F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x00-0x0F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x10-0x1F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x20-0x2F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x30-0x3F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x40-0x4F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x50-0x5F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x60-0x6F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x70-0x7F + // 1 2 3 4 5 6 7 8 9 A B C D E F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x80-0x8F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x90-0x9F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xA0-0xAF + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xB0-0xBF + xx, xx, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xC0-0xCF + s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xD0-0xDF + s2, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s4, s3, s3, // 0xE0-0xEF + s5, s6, s6, s6, s7, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xF0-0xFF +} + +const ( + lineSep = byte(168) //'\u2028' + paragraphSep = byte(169) //'\u2029' +) + +type decodeRuneState int + +const ( + validUTF8State decodeRuneState = iota + runeErrorState + lineSepState + paragraphSepState +) + +func decodeRuneInString(s string) (decodeRuneState, int) { + n := len(s) + s0 := s[0] + x := first[s0] + if x >= as { + // The following code simulates an additional check for x == xx and + // handling the ASCII and invalid cases accordingly. 
This mask-and-or + // approach prevents an additional branch. + mask := rune(x) << 31 >> 31 // Create 0x0000 or 0xFFFF. + if rune(s[0])&^mask|utf8.RuneError&mask == utf8.RuneError { + return runeErrorState, 1 + } + return validUTF8State, 1 + } + sz := int(x & 7) + if n < sz { + return runeErrorState, 1 + } + s1 := s[1] + switch x >> 4 { + case 0: + if s1 < locb || hicb < s1 { + return runeErrorState, 1 + } + case 1: + if s1 < 0xA0 || hicb < s1 { + return runeErrorState, 1 + } + case 2: + if s1 < locb || 0x9F < s1 { + return runeErrorState, 1 + } + case 3: + if s1 < 0x90 || hicb < s1 { + return runeErrorState, 1 + } + case 4: + if s1 < locb || 0x8F < s1 { + return runeErrorState, 1 + } + } + if sz <= 2 { + return validUTF8State, 2 + } + s2 := s[2] + if s2 < locb || hicb < s2 { + return runeErrorState, 1 + } + if sz <= 3 { + // separator character prefixes: [2]byte{226, 128} + if s0 == 226 && s1 == 128 { + switch s2 { + case lineSep: + return lineSepState, 3 + case paragraphSep: + return paragraphSepState, 3 + } + } + return validUTF8State, 3 + } + s3 := s[3] + if s3 < locb || hicb < s3 { + return runeErrorState, 1 + } + return validUTF8State, 4 +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/encoder.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/encoder.go new file mode 100644 index 00000000..14eb6a0d --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/encoder.go @@ -0,0 +1,596 @@ +package encoder + +import ( + "bytes" + "encoding" + "encoding/base64" + "encoding/json" + "fmt" + "math" + "reflect" + "strconv" + "strings" + "sync" + "unsafe" + + "github.com/goccy/go-json/internal/errors" + "github.com/goccy/go-json/internal/runtime" +) + +func (t OpType) IsMultipleOpHead() bool { + switch t { + case OpStructHead: + return true + case OpStructHeadSlice: + return true + case OpStructHeadArray: + return true + case OpStructHeadMap: + return true + case OpStructHeadStruct: + return true + case OpStructHeadOmitEmpty: + return true + case OpStructHeadOmitEmptySlice: + return true + case OpStructHeadOmitEmptyArray: + return true + case OpStructHeadOmitEmptyMap: + return true + case OpStructHeadOmitEmptyStruct: + return true + case OpStructHeadSlicePtr: + return true + case OpStructHeadOmitEmptySlicePtr: + return true + case OpStructHeadArrayPtr: + return true + case OpStructHeadOmitEmptyArrayPtr: + return true + case OpStructHeadMapPtr: + return true + case OpStructHeadOmitEmptyMapPtr: + return true + } + return false +} + +func (t OpType) IsMultipleOpField() bool { + switch t { + case OpStructField: + return true + case OpStructFieldSlice: + return true + case OpStructFieldArray: + return true + case OpStructFieldMap: + return true + case OpStructFieldStruct: + return true + case OpStructFieldOmitEmpty: + return true + case OpStructFieldOmitEmptySlice: + return true + case OpStructFieldOmitEmptyArray: + return true + case OpStructFieldOmitEmptyMap: + return true + case OpStructFieldOmitEmptyStruct: + return true + case OpStructFieldSlicePtr: + return true + case OpStructFieldOmitEmptySlicePtr: + return true + case OpStructFieldArrayPtr: + return true + case OpStructFieldOmitEmptyArrayPtr: + return true + case OpStructFieldMapPtr: + return true + case OpStructFieldOmitEmptyMapPtr: + return true + } + return false +} + +type OpcodeSet struct { + Type *runtime.Type + NoescapeKeyCode *Opcode + EscapeKeyCode *Opcode + InterfaceNoescapeKeyCode *Opcode + InterfaceEscapeKeyCode *Opcode + 
CodeLength int + EndCode *Opcode + Code Code + QueryCache map[string]*OpcodeSet + cacheMu sync.RWMutex +} + +func (s *OpcodeSet) getQueryCache(hash string) *OpcodeSet { + s.cacheMu.RLock() + codeSet := s.QueryCache[hash] + s.cacheMu.RUnlock() + return codeSet +} + +func (s *OpcodeSet) setQueryCache(hash string, codeSet *OpcodeSet) { + s.cacheMu.Lock() + s.QueryCache[hash] = codeSet + s.cacheMu.Unlock() +} + +type CompiledCode struct { + Code *Opcode + Linked bool // whether recursive code already have linked + CurLen uintptr + NextLen uintptr +} + +const StartDetectingCyclesAfter = 1000 + +func Load(base uintptr, idx uintptr) uintptr { + addr := base + idx + return **(**uintptr)(unsafe.Pointer(&addr)) +} + +func Store(base uintptr, idx uintptr, p uintptr) { + addr := base + idx + **(**uintptr)(unsafe.Pointer(&addr)) = p +} + +func LoadNPtr(base uintptr, idx uintptr, ptrNum int) uintptr { + addr := base + idx + p := **(**uintptr)(unsafe.Pointer(&addr)) + if p == 0 { + return 0 + } + return PtrToPtr(p) + /* + for i := 0; i < ptrNum; i++ { + if p == 0 { + return p + } + p = PtrToPtr(p) + } + return p + */ +} + +func PtrToUint64(p uintptr) uint64 { return **(**uint64)(unsafe.Pointer(&p)) } +func PtrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) } +func PtrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) } +func PtrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) } +func PtrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) } +func PtrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) } +func PtrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) } +func PtrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) } +func PtrToPtr(p uintptr) uintptr { + return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p))) +} +func PtrToNPtr(p uintptr, ptrNum int) uintptr { + for i := 0; i < ptrNum; i++ { + if p == 0 { + return 0 + } + p = PtrToPtr(p) + } + return p +} + +func PtrToUnsafePtr(p uintptr) unsafe.Pointer { + return *(*unsafe.Pointer)(unsafe.Pointer(&p)) +} +func PtrToInterface(code *Opcode, p uintptr) interface{} { + return *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: code.Type, + ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)), + })) +} + +func ErrUnsupportedValue(code *Opcode, ptr uintptr) *errors.UnsupportedValueError { + v := *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: code.Type, + ptr: *(*unsafe.Pointer)(unsafe.Pointer(&ptr)), + })) + return &errors.UnsupportedValueError{ + Value: reflect.ValueOf(v), + Str: fmt.Sprintf("encountered a cycle via %s", code.Type), + } +} + +func ErrUnsupportedFloat(v float64) *errors.UnsupportedValueError { + return &errors.UnsupportedValueError{ + Value: reflect.ValueOf(v), + Str: strconv.FormatFloat(v, 'g', -1, 64), + } +} + +func ErrMarshalerWithCode(code *Opcode, err error) *errors.MarshalerError { + return &errors.MarshalerError{ + Type: runtime.RType2Type(code.Type), + Err: err, + } +} + +type emptyInterface struct { + typ *runtime.Type + ptr unsafe.Pointer +} + +type MapItem struct { + Key []byte + Value []byte +} + +type Mapslice struct { + Items []MapItem +} + +func (m *Mapslice) Len() int { + return len(m.Items) +} + +func (m *Mapslice) Less(i, j int) bool { + return bytes.Compare(m.Items[i].Key, m.Items[j].Key) < 0 +} + +func (m *Mapslice) Swap(i, j int) { + m.Items[i], m.Items[j] = m.Items[j], m.Items[i] +} + +//nolint:structcheck,unused +type mapIter struct { + key 
unsafe.Pointer + elem unsafe.Pointer + t unsafe.Pointer + h unsafe.Pointer + buckets unsafe.Pointer + bptr unsafe.Pointer + overflow unsafe.Pointer + oldoverflow unsafe.Pointer + startBucket uintptr + offset uint8 + wrapped bool + B uint8 + i uint8 + bucket uintptr + checkBucket uintptr +} + +type MapContext struct { + Start int + First int + Idx int + Slice *Mapslice + Buf []byte + Len int + Iter mapIter +} + +var mapContextPool = sync.Pool{ + New: func() interface{} { + return &MapContext{ + Slice: &Mapslice{}, + } + }, +} + +func NewMapContext(mapLen int, unorderedMap bool) *MapContext { + ctx := mapContextPool.Get().(*MapContext) + if !unorderedMap { + if len(ctx.Slice.Items) < mapLen { + ctx.Slice.Items = make([]MapItem, mapLen) + } else { + ctx.Slice.Items = ctx.Slice.Items[:mapLen] + } + } + ctx.Buf = ctx.Buf[:0] + ctx.Iter = mapIter{} + ctx.Idx = 0 + ctx.Len = mapLen + return ctx +} + +func ReleaseMapContext(c *MapContext) { + mapContextPool.Put(c) +} + +//go:linkname MapIterInit runtime.mapiterinit +//go:noescape +func MapIterInit(mapType *runtime.Type, m unsafe.Pointer, it *mapIter) + +//go:linkname MapIterKey reflect.mapiterkey +//go:noescape +func MapIterKey(it *mapIter) unsafe.Pointer + +//go:linkname MapIterNext reflect.mapiternext +//go:noescape +func MapIterNext(it *mapIter) + +//go:linkname MapLen reflect.maplen +//go:noescape +func MapLen(m unsafe.Pointer) int + +func AppendByteSlice(_ *RuntimeContext, b []byte, src []byte) []byte { + if src == nil { + return append(b, `null`...) + } + encodedLen := base64.StdEncoding.EncodedLen(len(src)) + b = append(b, '"') + pos := len(b) + remainLen := cap(b[pos:]) + var buf []byte + if remainLen > encodedLen { + buf = b[pos : pos+encodedLen] + } else { + buf = make([]byte, encodedLen) + } + base64.StdEncoding.Encode(buf, src) + return append(append(b, buf...), '"') +} + +func AppendFloat32(_ *RuntimeContext, b []byte, v float32) []byte { + f64 := float64(v) + abs := math.Abs(f64) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. + if abs != 0 { + f32 := float32(abs) + if f32 < 1e-6 || f32 >= 1e21 { + fmt = 'e' + } + } + return strconv.AppendFloat(b, f64, fmt, -1, 32) +} + +func AppendFloat64(_ *RuntimeContext, b []byte, v float64) []byte { + abs := math.Abs(v) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. + if abs != 0 { + if abs < 1e-6 || abs >= 1e21 { + fmt = 'e' + } + } + return strconv.AppendFloat(b, v, fmt, -1, 64) +} + +func AppendBool(_ *RuntimeContext, b []byte, v bool) []byte { + if v { + return append(b, "true"...) + } + return append(b, "false"...) +} + +var ( + floatTable = [256]bool{ + '0': true, + '1': true, + '2': true, + '3': true, + '4': true, + '5': true, + '6': true, + '7': true, + '8': true, + '9': true, + '.': true, + 'e': true, + 'E': true, + '+': true, + '-': true, + } +) + +func AppendNumber(_ *RuntimeContext, b []byte, n json.Number) ([]byte, error) { + if len(n) == 0 { + return append(b, '0'), nil + } + for i := 0; i < len(n); i++ { + if !floatTable[n[i]] { + return nil, fmt.Errorf("json: invalid number literal %q", n) + } + } + b = append(b, n...) 
+ return b, nil +} + +func AppendMarshalJSON(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) { + rv := reflect.ValueOf(v) // convert by dynamic interface type + if (code.Flags & AddrForMarshalerFlags) != 0 { + if rv.CanAddr() { + rv = rv.Addr() + } else { + newV := reflect.New(rv.Type()) + newV.Elem().Set(rv) + rv = newV + } + } + v = rv.Interface() + var bb []byte + if (code.Flags & MarshalerContextFlags) != 0 { + marshaler, ok := v.(marshalerContext) + if !ok { + return AppendNull(ctx, b), nil + } + stdctx := ctx.Option.Context + if ctx.Option.Flag&FieldQueryOption != 0 { + stdctx = SetFieldQueryToContext(stdctx, code.FieldQuery) + } + b, err := marshaler.MarshalJSON(stdctx) + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + bb = b + } else { + marshaler, ok := v.(json.Marshaler) + if !ok { + return AppendNull(ctx, b), nil + } + b, err := marshaler.MarshalJSON() + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + bb = b + } + marshalBuf := ctx.MarshalBuf[:0] + marshalBuf = append(append(marshalBuf, bb...), nul) + compactedBuf, err := compact(b, marshalBuf, (ctx.Option.Flag&HTMLEscapeOption) != 0) + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + ctx.MarshalBuf = marshalBuf + return compactedBuf, nil +} + +func AppendMarshalJSONIndent(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) { + rv := reflect.ValueOf(v) // convert by dynamic interface type + if (code.Flags & AddrForMarshalerFlags) != 0 { + if rv.CanAddr() { + rv = rv.Addr() + } else { + newV := reflect.New(rv.Type()) + newV.Elem().Set(rv) + rv = newV + } + } + v = rv.Interface() + var bb []byte + if (code.Flags & MarshalerContextFlags) != 0 { + marshaler, ok := v.(marshalerContext) + if !ok { + return AppendNull(ctx, b), nil + } + b, err := marshaler.MarshalJSON(ctx.Option.Context) + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + bb = b + } else { + marshaler, ok := v.(json.Marshaler) + if !ok { + return AppendNull(ctx, b), nil + } + b, err := marshaler.MarshalJSON() + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + bb = b + } + marshalBuf := ctx.MarshalBuf[:0] + marshalBuf = append(append(marshalBuf, bb...), nul) + indentedBuf, err := doIndent( + b, + marshalBuf, + string(ctx.Prefix)+strings.Repeat(string(ctx.IndentStr), int(ctx.BaseIndent+code.Indent)), + string(ctx.IndentStr), + (ctx.Option.Flag&HTMLEscapeOption) != 0, + ) + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + ctx.MarshalBuf = marshalBuf + return indentedBuf, nil +} + +func AppendMarshalText(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) { + rv := reflect.ValueOf(v) // convert by dynamic interface type + if (code.Flags & AddrForMarshalerFlags) != 0 { + if rv.CanAddr() { + rv = rv.Addr() + } else { + newV := reflect.New(rv.Type()) + newV.Elem().Set(rv) + rv = newV + } + } + v = rv.Interface() + marshaler, ok := v.(encoding.TextMarshaler) + if !ok { + return AppendNull(ctx, b), nil + } + bytes, err := marshaler.MarshalText() + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + return AppendString(ctx, b, *(*string)(unsafe.Pointer(&bytes))), nil +} + +func AppendMarshalTextIndent(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) { + rv := 
reflect.ValueOf(v) // convert by dynamic interface type + if (code.Flags & AddrForMarshalerFlags) != 0 { + if rv.CanAddr() { + rv = rv.Addr() + } else { + newV := reflect.New(rv.Type()) + newV.Elem().Set(rv) + rv = newV + } + } + v = rv.Interface() + marshaler, ok := v.(encoding.TextMarshaler) + if !ok { + return AppendNull(ctx, b), nil + } + bytes, err := marshaler.MarshalText() + if err != nil { + return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err} + } + return AppendString(ctx, b, *(*string)(unsafe.Pointer(&bytes))), nil +} + +func AppendNull(_ *RuntimeContext, b []byte) []byte { + return append(b, "null"...) +} + +func AppendComma(_ *RuntimeContext, b []byte) []byte { + return append(b, ',') +} + +func AppendCommaIndent(_ *RuntimeContext, b []byte) []byte { + return append(b, ',', '\n') +} + +func AppendStructEnd(_ *RuntimeContext, b []byte) []byte { + return append(b, '}', ',') +} + +func AppendStructEndIndent(ctx *RuntimeContext, code *Opcode, b []byte) []byte { + b = append(b, '\n') + b = append(b, ctx.Prefix...) + indentNum := ctx.BaseIndent + code.Indent - 1 + for i := uint32(0); i < indentNum; i++ { + b = append(b, ctx.IndentStr...) + } + return append(b, '}', ',', '\n') +} + +func AppendIndent(ctx *RuntimeContext, b []byte, indent uint32) []byte { + b = append(b, ctx.Prefix...) + indentNum := ctx.BaseIndent + indent + for i := uint32(0); i < indentNum; i++ { + b = append(b, ctx.IndentStr...) + } + return b +} + +func IsNilForMarshaler(v interface{}) bool { + rv := reflect.ValueOf(v) + switch rv.Kind() { + case reflect.Bool: + return !rv.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return rv.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return rv.Uint() == 0 + case reflect.Float32, reflect.Float64: + return math.Float64bits(rv.Float()) == 0 + case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Func: + return rv.IsNil() + case reflect.Slice: + return rv.IsNil() || rv.Len() == 0 + case reflect.String: + return rv.Len() == 0 + } + return false +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/indent.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/indent.go new file mode 100644 index 00000000..dfe04b5e --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/indent.go @@ -0,0 +1,211 @@ +package encoder + +import ( + "bytes" + "fmt" + + "github.com/goccy/go-json/internal/errors" +) + +func takeIndentSrcRuntimeContext(src []byte) (*RuntimeContext, []byte) { + ctx := TakeRuntimeContext() + buf := ctx.Buf[:0] + buf = append(append(buf, src...), nul) + ctx.Buf = buf + return ctx, buf +} + +func Indent(buf *bytes.Buffer, src []byte, prefix, indentStr string) error { + if len(src) == 0 { + return errors.ErrUnexpectedEndOfJSON("", 0) + } + + srcCtx, srcBuf := takeIndentSrcRuntimeContext(src) + dstCtx := TakeRuntimeContext() + dst := dstCtx.Buf[:0] + + dst, err := indentAndWrite(buf, dst, srcBuf, prefix, indentStr) + if err != nil { + ReleaseRuntimeContext(srcCtx) + ReleaseRuntimeContext(dstCtx) + return err + } + dstCtx.Buf = dst + ReleaseRuntimeContext(srcCtx) + ReleaseRuntimeContext(dstCtx) + return nil +} + +func indentAndWrite(buf *bytes.Buffer, dst []byte, src []byte, prefix, indentStr string) ([]byte, error) { + dst, err := doIndent(dst, src, prefix, indentStr, false) + if err != nil { + return nil, err + } + if _, err := buf.Write(dst); err != nil { 
+ return nil, err + } + return dst, nil +} + +func doIndent(dst, src []byte, prefix, indentStr string, escape bool) ([]byte, error) { + buf, cursor, err := indentValue(dst, src, 0, 0, []byte(prefix), []byte(indentStr), escape) + if err != nil { + return nil, err + } + if err := validateEndBuf(src, cursor); err != nil { + return nil, err + } + return buf, nil +} + +func indentValue( + dst []byte, + src []byte, + indentNum int, + cursor int64, + prefix []byte, + indentBytes []byte, + escape bool) ([]byte, int64, error) { + for { + switch src[cursor] { + case ' ', '\t', '\n', '\r': + cursor++ + continue + case '{': + return indentObject(dst, src, indentNum, cursor, prefix, indentBytes, escape) + case '}': + return nil, 0, errors.ErrSyntax("unexpected character '}'", cursor) + case '[': + return indentArray(dst, src, indentNum, cursor, prefix, indentBytes, escape) + case ']': + return nil, 0, errors.ErrSyntax("unexpected character ']'", cursor) + case '"': + return compactString(dst, src, cursor, escape) + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return compactNumber(dst, src, cursor) + case 't': + return compactTrue(dst, src, cursor) + case 'f': + return compactFalse(dst, src, cursor) + case 'n': + return compactNull(dst, src, cursor) + default: + return nil, 0, errors.ErrSyntax(fmt.Sprintf("unexpected character '%c'", src[cursor]), cursor) + } + } +} + +func indentObject( + dst []byte, + src []byte, + indentNum int, + cursor int64, + prefix []byte, + indentBytes []byte, + escape bool) ([]byte, int64, error) { + if src[cursor] == '{' { + dst = append(dst, '{') + } else { + return nil, 0, errors.ErrExpected("expected { character for object value", cursor) + } + cursor = skipWhiteSpace(src, cursor+1) + if src[cursor] == '}' { + dst = append(dst, '}') + return dst, cursor + 1, nil + } + indentNum++ + var err error + for { + dst = append(append(dst, '\n'), prefix...) + for i := 0; i < indentNum; i++ { + dst = append(dst, indentBytes...) + } + cursor = skipWhiteSpace(src, cursor) + dst, cursor, err = compactString(dst, src, cursor, escape) + if err != nil { + return nil, 0, err + } + cursor = skipWhiteSpace(src, cursor) + if src[cursor] != ':' { + return nil, 0, errors.ErrSyntax( + fmt.Sprintf("invalid character '%c' after object key", src[cursor]), + cursor+1, + ) + } + dst = append(dst, ':', ' ') + dst, cursor, err = indentValue(dst, src, indentNum, cursor+1, prefix, indentBytes, escape) + if err != nil { + return nil, 0, err + } + cursor = skipWhiteSpace(src, cursor) + switch src[cursor] { + case '}': + dst = append(append(dst, '\n'), prefix...) + for i := 0; i < indentNum-1; i++ { + dst = append(dst, indentBytes...) + } + dst = append(dst, '}') + cursor++ + return dst, cursor, nil + case ',': + dst = append(dst, ',') + default: + return nil, 0, errors.ErrSyntax( + fmt.Sprintf("invalid character '%c' after object key:value pair", src[cursor]), + cursor+1, + ) + } + cursor++ + } +} + +func indentArray( + dst []byte, + src []byte, + indentNum int, + cursor int64, + prefix []byte, + indentBytes []byte, + escape bool) ([]byte, int64, error) { + if src[cursor] == '[' { + dst = append(dst, '[') + } else { + return nil, 0, errors.ErrExpected("expected [ character for array value", cursor) + } + cursor = skipWhiteSpace(src, cursor+1) + if src[cursor] == ']' { + dst = append(dst, ']') + return dst, cursor + 1, nil + } + indentNum++ + var err error + for { + dst = append(append(dst, '\n'), prefix...) + for i := 0; i < indentNum; i++ { + dst = append(dst, indentBytes...) 
+ } + dst, cursor, err = indentValue(dst, src, indentNum, cursor, prefix, indentBytes, escape) + if err != nil { + return nil, 0, err + } + cursor = skipWhiteSpace(src, cursor) + switch src[cursor] { + case ']': + dst = append(append(dst, '\n'), prefix...) + for i := 0; i < indentNum-1; i++ { + dst = append(dst, indentBytes...) + } + dst = append(dst, ']') + cursor++ + return dst, cursor, nil + case ',': + dst = append(dst, ',') + default: + return nil, 0, errors.ErrSyntax( + fmt.Sprintf("invalid character '%c' after array value", src[cursor]), + cursor+1, + ) + } + cursor++ + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/int.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/int.go new file mode 100644 index 00000000..85f07960 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/int.go @@ -0,0 +1,152 @@ +package encoder + +import ( + "unsafe" +) + +var endianness int + +func init() { + var b [2]byte + *(*uint16)(unsafe.Pointer(&b)) = uint16(0xABCD) + + switch b[0] { + case 0xCD: + endianness = 0 // LE + case 0xAB: + endianness = 1 // BE + default: + panic("could not determine endianness") + } +} + +// "00010203...96979899" cast to []uint16 +var intLELookup = [100]uint16{ + 0x3030, 0x3130, 0x3230, 0x3330, 0x3430, 0x3530, 0x3630, 0x3730, 0x3830, 0x3930, + 0x3031, 0x3131, 0x3231, 0x3331, 0x3431, 0x3531, 0x3631, 0x3731, 0x3831, 0x3931, + 0x3032, 0x3132, 0x3232, 0x3332, 0x3432, 0x3532, 0x3632, 0x3732, 0x3832, 0x3932, + 0x3033, 0x3133, 0x3233, 0x3333, 0x3433, 0x3533, 0x3633, 0x3733, 0x3833, 0x3933, + 0x3034, 0x3134, 0x3234, 0x3334, 0x3434, 0x3534, 0x3634, 0x3734, 0x3834, 0x3934, + 0x3035, 0x3135, 0x3235, 0x3335, 0x3435, 0x3535, 0x3635, 0x3735, 0x3835, 0x3935, + 0x3036, 0x3136, 0x3236, 0x3336, 0x3436, 0x3536, 0x3636, 0x3736, 0x3836, 0x3936, + 0x3037, 0x3137, 0x3237, 0x3337, 0x3437, 0x3537, 0x3637, 0x3737, 0x3837, 0x3937, + 0x3038, 0x3138, 0x3238, 0x3338, 0x3438, 0x3538, 0x3638, 0x3738, 0x3838, 0x3938, + 0x3039, 0x3139, 0x3239, 0x3339, 0x3439, 0x3539, 0x3639, 0x3739, 0x3839, 0x3939, +} + +var intBELookup = [100]uint16{ + 0x3030, 0x3031, 0x3032, 0x3033, 0x3034, 0x3035, 0x3036, 0x3037, 0x3038, 0x3039, + 0x3130, 0x3131, 0x3132, 0x3133, 0x3134, 0x3135, 0x3136, 0x3137, 0x3138, 0x3139, + 0x3230, 0x3231, 0x3232, 0x3233, 0x3234, 0x3235, 0x3236, 0x3237, 0x3238, 0x3239, + 0x3330, 0x3331, 0x3332, 0x3333, 0x3334, 0x3335, 0x3336, 0x3337, 0x3338, 0x3339, + 0x3430, 0x3431, 0x3432, 0x3433, 0x3434, 0x3435, 0x3436, 0x3437, 0x3438, 0x3439, + 0x3530, 0x3531, 0x3532, 0x3533, 0x3534, 0x3535, 0x3536, 0x3537, 0x3538, 0x3539, + 0x3630, 0x3631, 0x3632, 0x3633, 0x3634, 0x3635, 0x3636, 0x3637, 0x3638, 0x3639, + 0x3730, 0x3731, 0x3732, 0x3733, 0x3734, 0x3735, 0x3736, 0x3737, 0x3738, 0x3739, + 0x3830, 0x3831, 0x3832, 0x3833, 0x3834, 0x3835, 0x3836, 0x3837, 0x3838, 0x3839, + 0x3930, 0x3931, 0x3932, 0x3933, 0x3934, 0x3935, 0x3936, 0x3937, 0x3938, 0x3939, +} + +var intLookup = [2]*[100]uint16{&intLELookup, &intBELookup} + +func numMask(numBitSize uint8) uint64 { + return 1<>(code.NumBitSize-1))&1 == 1 + if !negative { + if n < 10 { + return append(out, byte(n+'0')) + } else if n < 100 { + u := intLELookup[n] + return append(out, byte(u), byte(u>>8)) + } + } else { + n = -n & mask + } + + lookup := intLookup[endianness] + + var b [22]byte + u := (*[11]uint16)(unsafe.Pointer(&b)) + i := 11 + + for n >= 100 { + j := n % 100 + n /= 100 + i-- + u[i] = lookup[j] + } + + i-- + u[i] = lookup[n] + + i *= 2 // convert to byte index + if 
n < 10 { + i++ // remove leading zero + } + if negative { + i-- + b[i] = '-' + } + + return append(out, b[i:]...) +} + +func AppendUint(_ *RuntimeContext, out []byte, p uintptr, code *Opcode) []byte { + var u64 uint64 + switch code.NumBitSize { + case 8: + u64 = (uint64)(**(**uint8)(unsafe.Pointer(&p))) + case 16: + u64 = (uint64)(**(**uint16)(unsafe.Pointer(&p))) + case 32: + u64 = (uint64)(**(**uint32)(unsafe.Pointer(&p))) + case 64: + u64 = **(**uint64)(unsafe.Pointer(&p)) + } + mask := numMask(code.NumBitSize) + n := u64 & mask + if n < 10 { + return append(out, byte(n+'0')) + } else if n < 100 { + u := intLELookup[n] + return append(out, byte(u), byte(u>>8)) + } + + lookup := intLookup[endianness] + + var b [22]byte + u := (*[11]uint16)(unsafe.Pointer(&b)) + i := 11 + + for n >= 100 { + j := n % 100 + n /= 100 + i-- + u[i] = lookup[j] + } + + i-- + u[i] = lookup[n] + + i *= 2 // convert to byte index + if n < 10 { + i++ // remove leading zero + } + return append(out, b[i:]...) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/map112.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/map112.go new file mode 100644 index 00000000..e96ffadf --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/map112.go @@ -0,0 +1,9 @@ +//go:build !go1.13 +// +build !go1.13 + +package encoder + +import "unsafe" + +//go:linkname MapIterValue reflect.mapitervalue +func MapIterValue(it *mapIter) unsafe.Pointer diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/map113.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/map113.go new file mode 100644 index 00000000..9b69dcc3 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/map113.go @@ -0,0 +1,9 @@ +//go:build go1.13 +// +build go1.13 + +package encoder + +import "unsafe" + +//go:linkname MapIterValue reflect.mapiterelem +func MapIterValue(it *mapIter) unsafe.Pointer diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/opcode.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/opcode.go new file mode 100644 index 00000000..05fc3ce0 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/opcode.go @@ -0,0 +1,669 @@ +package encoder + +import ( + "fmt" + "strings" + "unsafe" + + "github.com/goccy/go-json/internal/runtime" +) + +const uintptrSize = 4 << (^uintptr(0) >> 63) + +type OpFlags uint16 + +const ( + AnonymousHeadFlags OpFlags = 1 << 0 + AnonymousKeyFlags OpFlags = 1 << 1 + IndirectFlags OpFlags = 1 << 2 + IsTaggedKeyFlags OpFlags = 1 << 3 + NilCheckFlags OpFlags = 1 << 4 + AddrForMarshalerFlags OpFlags = 1 << 5 + IsNextOpPtrTypeFlags OpFlags = 1 << 6 + IsNilableTypeFlags OpFlags = 1 << 7 + MarshalerContextFlags OpFlags = 1 << 8 + NonEmptyInterfaceFlags OpFlags = 1 << 9 +) + +type Opcode struct { + Op OpType // operation type + Idx uint32 // offset to access ptr + Next *Opcode // next opcode + End *Opcode // array/slice/struct/map end + NextField *Opcode // next struct field + Key string // struct field key + Offset uint32 // offset size from struct header + PtrNum uint8 // pointer number: e.g. double pointer is 2. 
+ NumBitSize uint8 + Flags OpFlags + + Type *runtime.Type // go type + Jmp *CompiledCode // for recursive call + FieldQuery *FieldQuery // field query for Interface / MarshalJSON / MarshalText + ElemIdx uint32 // offset to access array/slice elem + Length uint32 // offset to access slice length or array length + Indent uint32 // indent number + Size uint32 // array/slice elem size + DisplayIdx uint32 // opcode index + DisplayKey string // key text to display +} + +func (c *Opcode) Validate() error { + var prevIdx uint32 + for code := c; !code.IsEnd(); { + if prevIdx != 0 { + if code.DisplayIdx != prevIdx+1 { + return fmt.Errorf( + "invalid index. previous display index is %d but next is %d. dump = %s", + prevIdx, code.DisplayIdx, c.Dump(), + ) + } + } + prevIdx = code.DisplayIdx + code = code.IterNext() + } + return nil +} + +func (c *Opcode) IterNext() *Opcode { + if c == nil { + return nil + } + switch c.Op.CodeType() { + case CodeArrayElem, CodeSliceElem, CodeMapKey: + return c.End + default: + return c.Next + } +} + +func (c *Opcode) IsEnd() bool { + if c == nil { + return true + } + return c.Op == OpEnd || c.Op == OpInterfaceEnd || c.Op == OpRecursiveEnd +} + +func (c *Opcode) MaxIdx() uint32 { + max := uint32(0) + for _, value := range []uint32{ + c.Idx, + c.ElemIdx, + c.Length, + c.Size, + } { + if max < value { + max = value + } + } + return max +} + +func (c *Opcode) ToHeaderType(isString bool) OpType { + switch c.Op { + case OpInt: + if isString { + return OpStructHeadIntString + } + return OpStructHeadInt + case OpIntPtr: + if isString { + return OpStructHeadIntPtrString + } + return OpStructHeadIntPtr + case OpUint: + if isString { + return OpStructHeadUintString + } + return OpStructHeadUint + case OpUintPtr: + if isString { + return OpStructHeadUintPtrString + } + return OpStructHeadUintPtr + case OpFloat32: + if isString { + return OpStructHeadFloat32String + } + return OpStructHeadFloat32 + case OpFloat32Ptr: + if isString { + return OpStructHeadFloat32PtrString + } + return OpStructHeadFloat32Ptr + case OpFloat64: + if isString { + return OpStructHeadFloat64String + } + return OpStructHeadFloat64 + case OpFloat64Ptr: + if isString { + return OpStructHeadFloat64PtrString + } + return OpStructHeadFloat64Ptr + case OpString: + if isString { + return OpStructHeadStringString + } + return OpStructHeadString + case OpStringPtr: + if isString { + return OpStructHeadStringPtrString + } + return OpStructHeadStringPtr + case OpNumber: + if isString { + return OpStructHeadNumberString + } + return OpStructHeadNumber + case OpNumberPtr: + if isString { + return OpStructHeadNumberPtrString + } + return OpStructHeadNumberPtr + case OpBool: + if isString { + return OpStructHeadBoolString + } + return OpStructHeadBool + case OpBoolPtr: + if isString { + return OpStructHeadBoolPtrString + } + return OpStructHeadBoolPtr + case OpBytes: + return OpStructHeadBytes + case OpBytesPtr: + return OpStructHeadBytesPtr + case OpMap: + return OpStructHeadMap + case OpMapPtr: + c.Op = OpMap + return OpStructHeadMapPtr + case OpArray: + return OpStructHeadArray + case OpArrayPtr: + c.Op = OpArray + return OpStructHeadArrayPtr + case OpSlice: + return OpStructHeadSlice + case OpSlicePtr: + c.Op = OpSlice + return OpStructHeadSlicePtr + case OpMarshalJSON: + return OpStructHeadMarshalJSON + case OpMarshalJSONPtr: + return OpStructHeadMarshalJSONPtr + case OpMarshalText: + return OpStructHeadMarshalText + case OpMarshalTextPtr: + return OpStructHeadMarshalTextPtr + } + return OpStructHead +} + +func (c 
*Opcode) ToFieldType(isString bool) OpType { + switch c.Op { + case OpInt: + if isString { + return OpStructFieldIntString + } + return OpStructFieldInt + case OpIntPtr: + if isString { + return OpStructFieldIntPtrString + } + return OpStructFieldIntPtr + case OpUint: + if isString { + return OpStructFieldUintString + } + return OpStructFieldUint + case OpUintPtr: + if isString { + return OpStructFieldUintPtrString + } + return OpStructFieldUintPtr + case OpFloat32: + if isString { + return OpStructFieldFloat32String + } + return OpStructFieldFloat32 + case OpFloat32Ptr: + if isString { + return OpStructFieldFloat32PtrString + } + return OpStructFieldFloat32Ptr + case OpFloat64: + if isString { + return OpStructFieldFloat64String + } + return OpStructFieldFloat64 + case OpFloat64Ptr: + if isString { + return OpStructFieldFloat64PtrString + } + return OpStructFieldFloat64Ptr + case OpString: + if isString { + return OpStructFieldStringString + } + return OpStructFieldString + case OpStringPtr: + if isString { + return OpStructFieldStringPtrString + } + return OpStructFieldStringPtr + case OpNumber: + if isString { + return OpStructFieldNumberString + } + return OpStructFieldNumber + case OpNumberPtr: + if isString { + return OpStructFieldNumberPtrString + } + return OpStructFieldNumberPtr + case OpBool: + if isString { + return OpStructFieldBoolString + } + return OpStructFieldBool + case OpBoolPtr: + if isString { + return OpStructFieldBoolPtrString + } + return OpStructFieldBoolPtr + case OpBytes: + return OpStructFieldBytes + case OpBytesPtr: + return OpStructFieldBytesPtr + case OpMap: + return OpStructFieldMap + case OpMapPtr: + c.Op = OpMap + return OpStructFieldMapPtr + case OpArray: + return OpStructFieldArray + case OpArrayPtr: + c.Op = OpArray + return OpStructFieldArrayPtr + case OpSlice: + return OpStructFieldSlice + case OpSlicePtr: + c.Op = OpSlice + return OpStructFieldSlicePtr + case OpMarshalJSON: + return OpStructFieldMarshalJSON + case OpMarshalJSONPtr: + return OpStructFieldMarshalJSONPtr + case OpMarshalText: + return OpStructFieldMarshalText + case OpMarshalTextPtr: + return OpStructFieldMarshalTextPtr + } + return OpStructField +} + +func newOpCode(ctx *compileContext, typ *runtime.Type, op OpType) *Opcode { + return newOpCodeWithNext(ctx, typ, op, newEndOp(ctx, typ)) +} + +func opcodeOffset(idx int) uint32 { + return uint32(idx) * uintptrSize +} + +func getCodeAddrByIdx(head *Opcode, idx uint32) *Opcode { + addr := uintptr(unsafe.Pointer(head)) + uintptr(idx)*unsafe.Sizeof(Opcode{}) + return *(**Opcode)(unsafe.Pointer(&addr)) +} + +func copyOpcode(code *Opcode) *Opcode { + codeNum := ToEndCode(code).DisplayIdx + 1 + codeSlice := make([]Opcode, codeNum) + head := (*Opcode)((*runtime.SliceHeader)(unsafe.Pointer(&codeSlice)).Data) + ptr := head + c := code + for { + *ptr = Opcode{ + Op: c.Op, + Key: c.Key, + PtrNum: c.PtrNum, + NumBitSize: c.NumBitSize, + Flags: c.Flags, + Idx: c.Idx, + Offset: c.Offset, + Type: c.Type, + FieldQuery: c.FieldQuery, + DisplayIdx: c.DisplayIdx, + DisplayKey: c.DisplayKey, + ElemIdx: c.ElemIdx, + Length: c.Length, + Size: c.Size, + Indent: c.Indent, + Jmp: c.Jmp, + } + if c.End != nil { + ptr.End = getCodeAddrByIdx(head, c.End.DisplayIdx) + } + if c.NextField != nil { + ptr.NextField = getCodeAddrByIdx(head, c.NextField.DisplayIdx) + } + if c.Next != nil { + ptr.Next = getCodeAddrByIdx(head, c.Next.DisplayIdx) + } + if c.IsEnd() { + break + } + ptr = getCodeAddrByIdx(head, c.DisplayIdx+1) + c = c.IterNext() + } + return head +} + +func 
setTotalLengthToInterfaceOp(code *Opcode) { + for c := code; !c.IsEnd(); { + if c.Op == OpInterface || c.Op == OpInterfacePtr { + c.Length = uint32(code.TotalLength()) + } + c = c.IterNext() + } +} + +func ToEndCode(code *Opcode) *Opcode { + c := code + for !c.IsEnd() { + c = c.IterNext() + } + return c +} + +func copyToInterfaceOpcode(code *Opcode) *Opcode { + copied := copyOpcode(code) + c := copied + c = ToEndCode(c) + c.Idx += uintptrSize + c.ElemIdx = c.Idx + uintptrSize + c.Length = c.Idx + 2*uintptrSize + c.Op = OpInterfaceEnd + return copied +} + +func newOpCodeWithNext(ctx *compileContext, typ *runtime.Type, op OpType, next *Opcode) *Opcode { + return &Opcode{ + Op: op, + Idx: opcodeOffset(ctx.ptrIndex), + Next: next, + Type: typ, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + } +} + +func newEndOp(ctx *compileContext, typ *runtime.Type) *Opcode { + return newOpCodeWithNext(ctx, typ, OpEnd, nil) +} + +func (c *Opcode) TotalLength() int { + var idx int + code := c + for !code.IsEnd() { + maxIdx := int(code.MaxIdx() / uintptrSize) + if idx < maxIdx { + idx = maxIdx + } + if code.Op == OpRecursiveEnd { + break + } + code = code.IterNext() + } + maxIdx := int(code.MaxIdx() / uintptrSize) + if idx < maxIdx { + idx = maxIdx + } + return idx + 1 +} + +func (c *Opcode) dumpHead(code *Opcode) string { + var length uint32 + if code.Op.CodeType() == CodeArrayHead { + length = code.Length + } else { + length = code.Length / uintptrSize + } + return fmt.Sprintf( + `[%03d]%s%s ([idx:%d][elemIdx:%d][length:%d])`, + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + code.ElemIdx/uintptrSize, + length, + ) +} + +func (c *Opcode) dumpMapHead(code *Opcode) string { + return fmt.Sprintf( + `[%03d]%s%s ([idx:%d])`, + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + ) +} + +func (c *Opcode) dumpMapEnd(code *Opcode) string { + return fmt.Sprintf( + `[%03d]%s%s ([idx:%d])`, + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + ) +} + +func (c *Opcode) dumpElem(code *Opcode) string { + var length uint32 + if code.Op.CodeType() == CodeArrayElem { + length = code.Length + } else { + length = code.Length / uintptrSize + } + return fmt.Sprintf( + `[%03d]%s%s ([idx:%d][elemIdx:%d][length:%d][size:%d])`, + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + code.ElemIdx/uintptrSize, + length, + code.Size, + ) +} + +func (c *Opcode) dumpField(code *Opcode) string { + return fmt.Sprintf( + `[%03d]%s%s ([idx:%d][key:%s][offset:%d])`, + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + code.DisplayKey, + code.Offset, + ) +} + +func (c *Opcode) dumpKey(code *Opcode) string { + return fmt.Sprintf( + `[%03d]%s%s ([idx:%d])`, + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + ) +} + +func (c *Opcode) dumpValue(code *Opcode) string { + return fmt.Sprintf( + `[%03d]%s%s ([idx:%d])`, + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + ) +} + +func (c *Opcode) Dump() string { + codes := []string{} + for code := c; !code.IsEnd(); { + switch code.Op.CodeType() { + case CodeSliceHead: + codes = append(codes, c.dumpHead(code)) + code = code.Next + case CodeMapHead: + codes = append(codes, c.dumpMapHead(code)) + code = code.Next + case CodeArrayElem, CodeSliceElem: + codes = append(codes, c.dumpElem(code)) + code 
= code.End + case CodeMapKey: + codes = append(codes, c.dumpKey(code)) + code = code.End + case CodeMapValue: + codes = append(codes, c.dumpValue(code)) + code = code.Next + case CodeMapEnd: + codes = append(codes, c.dumpMapEnd(code)) + code = code.Next + case CodeStructField: + codes = append(codes, c.dumpField(code)) + code = code.Next + case CodeStructEnd: + codes = append(codes, c.dumpField(code)) + code = code.Next + default: + codes = append(codes, fmt.Sprintf( + "[%03d]%s%s ([idx:%d])", + code.DisplayIdx, + strings.Repeat("-", int(code.Indent)), + code.Op, + code.Idx/uintptrSize, + )) + code = code.Next + } + } + return strings.Join(codes, "\n") +} + +func newSliceHeaderCode(ctx *compileContext, typ *runtime.Type) *Opcode { + idx := opcodeOffset(ctx.ptrIndex) + ctx.incPtrIndex() + elemIdx := opcodeOffset(ctx.ptrIndex) + ctx.incPtrIndex() + length := opcodeOffset(ctx.ptrIndex) + return &Opcode{ + Op: OpSlice, + Type: typ, + Idx: idx, + DisplayIdx: ctx.opcodeIndex, + ElemIdx: elemIdx, + Length: length, + Indent: ctx.indent, + } +} + +func newSliceElemCode(ctx *compileContext, typ *runtime.Type, head *Opcode, size uintptr) *Opcode { + return &Opcode{ + Op: OpSliceElem, + Type: typ, + Idx: head.Idx, + DisplayIdx: ctx.opcodeIndex, + ElemIdx: head.ElemIdx, + Length: head.Length, + Indent: ctx.indent, + Size: uint32(size), + } +} + +func newArrayHeaderCode(ctx *compileContext, typ *runtime.Type, alen int) *Opcode { + idx := opcodeOffset(ctx.ptrIndex) + ctx.incPtrIndex() + elemIdx := opcodeOffset(ctx.ptrIndex) + return &Opcode{ + Op: OpArray, + Type: typ, + Idx: idx, + DisplayIdx: ctx.opcodeIndex, + ElemIdx: elemIdx, + Indent: ctx.indent, + Length: uint32(alen), + } +} + +func newArrayElemCode(ctx *compileContext, typ *runtime.Type, head *Opcode, length int, size uintptr) *Opcode { + return &Opcode{ + Op: OpArrayElem, + Type: typ, + Idx: head.Idx, + DisplayIdx: ctx.opcodeIndex, + ElemIdx: head.ElemIdx, + Length: uint32(length), + Indent: ctx.indent, + Size: uint32(size), + } +} + +func newMapHeaderCode(ctx *compileContext, typ *runtime.Type) *Opcode { + idx := opcodeOffset(ctx.ptrIndex) + ctx.incPtrIndex() + return &Opcode{ + Op: OpMap, + Type: typ, + Idx: idx, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + } +} + +func newMapKeyCode(ctx *compileContext, typ *runtime.Type, head *Opcode) *Opcode { + return &Opcode{ + Op: OpMapKey, + Type: typ, + Idx: head.Idx, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + } +} + +func newMapValueCode(ctx *compileContext, typ *runtime.Type, head *Opcode) *Opcode { + return &Opcode{ + Op: OpMapValue, + Type: typ, + Idx: head.Idx, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + } +} + +func newMapEndCode(ctx *compileContext, typ *runtime.Type, head *Opcode) *Opcode { + return &Opcode{ + Op: OpMapEnd, + Type: typ, + Idx: head.Idx, + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + Next: newEndOp(ctx, typ), + } +} + +func newRecursiveCode(ctx *compileContext, typ *runtime.Type, jmp *CompiledCode) *Opcode { + return &Opcode{ + Op: OpRecursive, + Type: typ, + Idx: opcodeOffset(ctx.ptrIndex), + Next: newEndOp(ctx, typ), + DisplayIdx: ctx.opcodeIndex, + Indent: ctx.indent, + Jmp: jmp, + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/option.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/option.go new file mode 100644 index 00000000..82d5ce3e --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/option.go @@ -0,0 +1,47 @@ +package encoder 
+ +import ( + "context" + "io" +) + +type OptionFlag uint8 + +const ( + HTMLEscapeOption OptionFlag = 1 << iota + IndentOption + UnorderedMapOption + DebugOption + ColorizeOption + ContextOption + NormalizeUTF8Option + FieldQueryOption +) + +type Option struct { + Flag OptionFlag + ColorScheme *ColorScheme + Context context.Context + DebugOut io.Writer +} + +type EncodeFormat struct { + Header string + Footer string +} + +type EncodeFormatScheme struct { + Int EncodeFormat + Uint EncodeFormat + Float EncodeFormat + Bool EncodeFormat + String EncodeFormat + Binary EncodeFormat + ObjectKey EncodeFormat + Null EncodeFormat +} + +type ( + ColorScheme = EncodeFormatScheme + ColorFormat = EncodeFormat +) diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/optype.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/optype.go new file mode 100644 index 00000000..5c1241b4 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/optype.go @@ -0,0 +1,932 @@ +// Code generated by internal/cmd/generator. DO NOT EDIT! +package encoder + +import ( + "strings" +) + +type CodeType int + +const ( + CodeOp CodeType = 0 + CodeArrayHead CodeType = 1 + CodeArrayElem CodeType = 2 + CodeSliceHead CodeType = 3 + CodeSliceElem CodeType = 4 + CodeMapHead CodeType = 5 + CodeMapKey CodeType = 6 + CodeMapValue CodeType = 7 + CodeMapEnd CodeType = 8 + CodeRecursive CodeType = 9 + CodeStructField CodeType = 10 + CodeStructEnd CodeType = 11 +) + +var opTypeStrings = [400]string{ + "End", + "Interface", + "Ptr", + "SliceElem", + "SliceEnd", + "ArrayElem", + "ArrayEnd", + "MapKey", + "MapValue", + "MapEnd", + "Recursive", + "RecursivePtr", + "RecursiveEnd", + "InterfaceEnd", + "Int", + "Uint", + "Float32", + "Float64", + "Bool", + "String", + "Bytes", + "Number", + "Array", + "Map", + "Slice", + "Struct", + "MarshalJSON", + "MarshalText", + "IntString", + "UintString", + "Float32String", + "Float64String", + "BoolString", + "StringString", + "NumberString", + "IntPtr", + "UintPtr", + "Float32Ptr", + "Float64Ptr", + "BoolPtr", + "StringPtr", + "BytesPtr", + "NumberPtr", + "ArrayPtr", + "MapPtr", + "SlicePtr", + "MarshalJSONPtr", + "MarshalTextPtr", + "InterfacePtr", + "IntPtrString", + "UintPtrString", + "Float32PtrString", + "Float64PtrString", + "BoolPtrString", + "StringPtrString", + "NumberPtrString", + "StructHeadInt", + "StructHeadOmitEmptyInt", + "StructPtrHeadInt", + "StructPtrHeadOmitEmptyInt", + "StructHeadUint", + "StructHeadOmitEmptyUint", + "StructPtrHeadUint", + "StructPtrHeadOmitEmptyUint", + "StructHeadFloat32", + "StructHeadOmitEmptyFloat32", + "StructPtrHeadFloat32", + "StructPtrHeadOmitEmptyFloat32", + "StructHeadFloat64", + "StructHeadOmitEmptyFloat64", + "StructPtrHeadFloat64", + "StructPtrHeadOmitEmptyFloat64", + "StructHeadBool", + "StructHeadOmitEmptyBool", + "StructPtrHeadBool", + "StructPtrHeadOmitEmptyBool", + "StructHeadString", + "StructHeadOmitEmptyString", + "StructPtrHeadString", + "StructPtrHeadOmitEmptyString", + "StructHeadBytes", + "StructHeadOmitEmptyBytes", + "StructPtrHeadBytes", + "StructPtrHeadOmitEmptyBytes", + "StructHeadNumber", + "StructHeadOmitEmptyNumber", + "StructPtrHeadNumber", + "StructPtrHeadOmitEmptyNumber", + "StructHeadArray", + "StructHeadOmitEmptyArray", + "StructPtrHeadArray", + "StructPtrHeadOmitEmptyArray", + "StructHeadMap", + "StructHeadOmitEmptyMap", + "StructPtrHeadMap", + "StructPtrHeadOmitEmptyMap", + "StructHeadSlice", + "StructHeadOmitEmptySlice", + "StructPtrHeadSlice", + 
"StructPtrHeadOmitEmptySlice", + "StructHeadStruct", + "StructHeadOmitEmptyStruct", + "StructPtrHeadStruct", + "StructPtrHeadOmitEmptyStruct", + "StructHeadMarshalJSON", + "StructHeadOmitEmptyMarshalJSON", + "StructPtrHeadMarshalJSON", + "StructPtrHeadOmitEmptyMarshalJSON", + "StructHeadMarshalText", + "StructHeadOmitEmptyMarshalText", + "StructPtrHeadMarshalText", + "StructPtrHeadOmitEmptyMarshalText", + "StructHeadIntString", + "StructHeadOmitEmptyIntString", + "StructPtrHeadIntString", + "StructPtrHeadOmitEmptyIntString", + "StructHeadUintString", + "StructHeadOmitEmptyUintString", + "StructPtrHeadUintString", + "StructPtrHeadOmitEmptyUintString", + "StructHeadFloat32String", + "StructHeadOmitEmptyFloat32String", + "StructPtrHeadFloat32String", + "StructPtrHeadOmitEmptyFloat32String", + "StructHeadFloat64String", + "StructHeadOmitEmptyFloat64String", + "StructPtrHeadFloat64String", + "StructPtrHeadOmitEmptyFloat64String", + "StructHeadBoolString", + "StructHeadOmitEmptyBoolString", + "StructPtrHeadBoolString", + "StructPtrHeadOmitEmptyBoolString", + "StructHeadStringString", + "StructHeadOmitEmptyStringString", + "StructPtrHeadStringString", + "StructPtrHeadOmitEmptyStringString", + "StructHeadNumberString", + "StructHeadOmitEmptyNumberString", + "StructPtrHeadNumberString", + "StructPtrHeadOmitEmptyNumberString", + "StructHeadIntPtr", + "StructHeadOmitEmptyIntPtr", + "StructPtrHeadIntPtr", + "StructPtrHeadOmitEmptyIntPtr", + "StructHeadUintPtr", + "StructHeadOmitEmptyUintPtr", + "StructPtrHeadUintPtr", + "StructPtrHeadOmitEmptyUintPtr", + "StructHeadFloat32Ptr", + "StructHeadOmitEmptyFloat32Ptr", + "StructPtrHeadFloat32Ptr", + "StructPtrHeadOmitEmptyFloat32Ptr", + "StructHeadFloat64Ptr", + "StructHeadOmitEmptyFloat64Ptr", + "StructPtrHeadFloat64Ptr", + "StructPtrHeadOmitEmptyFloat64Ptr", + "StructHeadBoolPtr", + "StructHeadOmitEmptyBoolPtr", + "StructPtrHeadBoolPtr", + "StructPtrHeadOmitEmptyBoolPtr", + "StructHeadStringPtr", + "StructHeadOmitEmptyStringPtr", + "StructPtrHeadStringPtr", + "StructPtrHeadOmitEmptyStringPtr", + "StructHeadBytesPtr", + "StructHeadOmitEmptyBytesPtr", + "StructPtrHeadBytesPtr", + "StructPtrHeadOmitEmptyBytesPtr", + "StructHeadNumberPtr", + "StructHeadOmitEmptyNumberPtr", + "StructPtrHeadNumberPtr", + "StructPtrHeadOmitEmptyNumberPtr", + "StructHeadArrayPtr", + "StructHeadOmitEmptyArrayPtr", + "StructPtrHeadArrayPtr", + "StructPtrHeadOmitEmptyArrayPtr", + "StructHeadMapPtr", + "StructHeadOmitEmptyMapPtr", + "StructPtrHeadMapPtr", + "StructPtrHeadOmitEmptyMapPtr", + "StructHeadSlicePtr", + "StructHeadOmitEmptySlicePtr", + "StructPtrHeadSlicePtr", + "StructPtrHeadOmitEmptySlicePtr", + "StructHeadMarshalJSONPtr", + "StructHeadOmitEmptyMarshalJSONPtr", + "StructPtrHeadMarshalJSONPtr", + "StructPtrHeadOmitEmptyMarshalJSONPtr", + "StructHeadMarshalTextPtr", + "StructHeadOmitEmptyMarshalTextPtr", + "StructPtrHeadMarshalTextPtr", + "StructPtrHeadOmitEmptyMarshalTextPtr", + "StructHeadInterfacePtr", + "StructHeadOmitEmptyInterfacePtr", + "StructPtrHeadInterfacePtr", + "StructPtrHeadOmitEmptyInterfacePtr", + "StructHeadIntPtrString", + "StructHeadOmitEmptyIntPtrString", + "StructPtrHeadIntPtrString", + "StructPtrHeadOmitEmptyIntPtrString", + "StructHeadUintPtrString", + "StructHeadOmitEmptyUintPtrString", + "StructPtrHeadUintPtrString", + "StructPtrHeadOmitEmptyUintPtrString", + "StructHeadFloat32PtrString", + "StructHeadOmitEmptyFloat32PtrString", + "StructPtrHeadFloat32PtrString", + "StructPtrHeadOmitEmptyFloat32PtrString", + "StructHeadFloat64PtrString", + 
"StructHeadOmitEmptyFloat64PtrString", + "StructPtrHeadFloat64PtrString", + "StructPtrHeadOmitEmptyFloat64PtrString", + "StructHeadBoolPtrString", + "StructHeadOmitEmptyBoolPtrString", + "StructPtrHeadBoolPtrString", + "StructPtrHeadOmitEmptyBoolPtrString", + "StructHeadStringPtrString", + "StructHeadOmitEmptyStringPtrString", + "StructPtrHeadStringPtrString", + "StructPtrHeadOmitEmptyStringPtrString", + "StructHeadNumberPtrString", + "StructHeadOmitEmptyNumberPtrString", + "StructPtrHeadNumberPtrString", + "StructPtrHeadOmitEmptyNumberPtrString", + "StructHead", + "StructHeadOmitEmpty", + "StructPtrHead", + "StructPtrHeadOmitEmpty", + "StructFieldInt", + "StructFieldOmitEmptyInt", + "StructEndInt", + "StructEndOmitEmptyInt", + "StructFieldUint", + "StructFieldOmitEmptyUint", + "StructEndUint", + "StructEndOmitEmptyUint", + "StructFieldFloat32", + "StructFieldOmitEmptyFloat32", + "StructEndFloat32", + "StructEndOmitEmptyFloat32", + "StructFieldFloat64", + "StructFieldOmitEmptyFloat64", + "StructEndFloat64", + "StructEndOmitEmptyFloat64", + "StructFieldBool", + "StructFieldOmitEmptyBool", + "StructEndBool", + "StructEndOmitEmptyBool", + "StructFieldString", + "StructFieldOmitEmptyString", + "StructEndString", + "StructEndOmitEmptyString", + "StructFieldBytes", + "StructFieldOmitEmptyBytes", + "StructEndBytes", + "StructEndOmitEmptyBytes", + "StructFieldNumber", + "StructFieldOmitEmptyNumber", + "StructEndNumber", + "StructEndOmitEmptyNumber", + "StructFieldArray", + "StructFieldOmitEmptyArray", + "StructEndArray", + "StructEndOmitEmptyArray", + "StructFieldMap", + "StructFieldOmitEmptyMap", + "StructEndMap", + "StructEndOmitEmptyMap", + "StructFieldSlice", + "StructFieldOmitEmptySlice", + "StructEndSlice", + "StructEndOmitEmptySlice", + "StructFieldStruct", + "StructFieldOmitEmptyStruct", + "StructEndStruct", + "StructEndOmitEmptyStruct", + "StructFieldMarshalJSON", + "StructFieldOmitEmptyMarshalJSON", + "StructEndMarshalJSON", + "StructEndOmitEmptyMarshalJSON", + "StructFieldMarshalText", + "StructFieldOmitEmptyMarshalText", + "StructEndMarshalText", + "StructEndOmitEmptyMarshalText", + "StructFieldIntString", + "StructFieldOmitEmptyIntString", + "StructEndIntString", + "StructEndOmitEmptyIntString", + "StructFieldUintString", + "StructFieldOmitEmptyUintString", + "StructEndUintString", + "StructEndOmitEmptyUintString", + "StructFieldFloat32String", + "StructFieldOmitEmptyFloat32String", + "StructEndFloat32String", + "StructEndOmitEmptyFloat32String", + "StructFieldFloat64String", + "StructFieldOmitEmptyFloat64String", + "StructEndFloat64String", + "StructEndOmitEmptyFloat64String", + "StructFieldBoolString", + "StructFieldOmitEmptyBoolString", + "StructEndBoolString", + "StructEndOmitEmptyBoolString", + "StructFieldStringString", + "StructFieldOmitEmptyStringString", + "StructEndStringString", + "StructEndOmitEmptyStringString", + "StructFieldNumberString", + "StructFieldOmitEmptyNumberString", + "StructEndNumberString", + "StructEndOmitEmptyNumberString", + "StructFieldIntPtr", + "StructFieldOmitEmptyIntPtr", + "StructEndIntPtr", + "StructEndOmitEmptyIntPtr", + "StructFieldUintPtr", + "StructFieldOmitEmptyUintPtr", + "StructEndUintPtr", + "StructEndOmitEmptyUintPtr", + "StructFieldFloat32Ptr", + "StructFieldOmitEmptyFloat32Ptr", + "StructEndFloat32Ptr", + "StructEndOmitEmptyFloat32Ptr", + "StructFieldFloat64Ptr", + "StructFieldOmitEmptyFloat64Ptr", + "StructEndFloat64Ptr", + "StructEndOmitEmptyFloat64Ptr", + "StructFieldBoolPtr", + "StructFieldOmitEmptyBoolPtr", + "StructEndBoolPtr", + 
"StructEndOmitEmptyBoolPtr", + "StructFieldStringPtr", + "StructFieldOmitEmptyStringPtr", + "StructEndStringPtr", + "StructEndOmitEmptyStringPtr", + "StructFieldBytesPtr", + "StructFieldOmitEmptyBytesPtr", + "StructEndBytesPtr", + "StructEndOmitEmptyBytesPtr", + "StructFieldNumberPtr", + "StructFieldOmitEmptyNumberPtr", + "StructEndNumberPtr", + "StructEndOmitEmptyNumberPtr", + "StructFieldArrayPtr", + "StructFieldOmitEmptyArrayPtr", + "StructEndArrayPtr", + "StructEndOmitEmptyArrayPtr", + "StructFieldMapPtr", + "StructFieldOmitEmptyMapPtr", + "StructEndMapPtr", + "StructEndOmitEmptyMapPtr", + "StructFieldSlicePtr", + "StructFieldOmitEmptySlicePtr", + "StructEndSlicePtr", + "StructEndOmitEmptySlicePtr", + "StructFieldMarshalJSONPtr", + "StructFieldOmitEmptyMarshalJSONPtr", + "StructEndMarshalJSONPtr", + "StructEndOmitEmptyMarshalJSONPtr", + "StructFieldMarshalTextPtr", + "StructFieldOmitEmptyMarshalTextPtr", + "StructEndMarshalTextPtr", + "StructEndOmitEmptyMarshalTextPtr", + "StructFieldInterfacePtr", + "StructFieldOmitEmptyInterfacePtr", + "StructEndInterfacePtr", + "StructEndOmitEmptyInterfacePtr", + "StructFieldIntPtrString", + "StructFieldOmitEmptyIntPtrString", + "StructEndIntPtrString", + "StructEndOmitEmptyIntPtrString", + "StructFieldUintPtrString", + "StructFieldOmitEmptyUintPtrString", + "StructEndUintPtrString", + "StructEndOmitEmptyUintPtrString", + "StructFieldFloat32PtrString", + "StructFieldOmitEmptyFloat32PtrString", + "StructEndFloat32PtrString", + "StructEndOmitEmptyFloat32PtrString", + "StructFieldFloat64PtrString", + "StructFieldOmitEmptyFloat64PtrString", + "StructEndFloat64PtrString", + "StructEndOmitEmptyFloat64PtrString", + "StructFieldBoolPtrString", + "StructFieldOmitEmptyBoolPtrString", + "StructEndBoolPtrString", + "StructEndOmitEmptyBoolPtrString", + "StructFieldStringPtrString", + "StructFieldOmitEmptyStringPtrString", + "StructEndStringPtrString", + "StructEndOmitEmptyStringPtrString", + "StructFieldNumberPtrString", + "StructFieldOmitEmptyNumberPtrString", + "StructEndNumberPtrString", + "StructEndOmitEmptyNumberPtrString", + "StructField", + "StructFieldOmitEmpty", + "StructEnd", + "StructEndOmitEmpty", +} + +type OpType uint16 + +const ( + OpEnd OpType = 0 + OpInterface OpType = 1 + OpPtr OpType = 2 + OpSliceElem OpType = 3 + OpSliceEnd OpType = 4 + OpArrayElem OpType = 5 + OpArrayEnd OpType = 6 + OpMapKey OpType = 7 + OpMapValue OpType = 8 + OpMapEnd OpType = 9 + OpRecursive OpType = 10 + OpRecursivePtr OpType = 11 + OpRecursiveEnd OpType = 12 + OpInterfaceEnd OpType = 13 + OpInt OpType = 14 + OpUint OpType = 15 + OpFloat32 OpType = 16 + OpFloat64 OpType = 17 + OpBool OpType = 18 + OpString OpType = 19 + OpBytes OpType = 20 + OpNumber OpType = 21 + OpArray OpType = 22 + OpMap OpType = 23 + OpSlice OpType = 24 + OpStruct OpType = 25 + OpMarshalJSON OpType = 26 + OpMarshalText OpType = 27 + OpIntString OpType = 28 + OpUintString OpType = 29 + OpFloat32String OpType = 30 + OpFloat64String OpType = 31 + OpBoolString OpType = 32 + OpStringString OpType = 33 + OpNumberString OpType = 34 + OpIntPtr OpType = 35 + OpUintPtr OpType = 36 + OpFloat32Ptr OpType = 37 + OpFloat64Ptr OpType = 38 + OpBoolPtr OpType = 39 + OpStringPtr OpType = 40 + OpBytesPtr OpType = 41 + OpNumberPtr OpType = 42 + OpArrayPtr OpType = 43 + OpMapPtr OpType = 44 + OpSlicePtr OpType = 45 + OpMarshalJSONPtr OpType = 46 + OpMarshalTextPtr OpType = 47 + OpInterfacePtr OpType = 48 + OpIntPtrString OpType = 49 + OpUintPtrString OpType = 50 + OpFloat32PtrString OpType = 51 + OpFloat64PtrString 
OpType = 52 + OpBoolPtrString OpType = 53 + OpStringPtrString OpType = 54 + OpNumberPtrString OpType = 55 + OpStructHeadInt OpType = 56 + OpStructHeadOmitEmptyInt OpType = 57 + OpStructPtrHeadInt OpType = 58 + OpStructPtrHeadOmitEmptyInt OpType = 59 + OpStructHeadUint OpType = 60 + OpStructHeadOmitEmptyUint OpType = 61 + OpStructPtrHeadUint OpType = 62 + OpStructPtrHeadOmitEmptyUint OpType = 63 + OpStructHeadFloat32 OpType = 64 + OpStructHeadOmitEmptyFloat32 OpType = 65 + OpStructPtrHeadFloat32 OpType = 66 + OpStructPtrHeadOmitEmptyFloat32 OpType = 67 + OpStructHeadFloat64 OpType = 68 + OpStructHeadOmitEmptyFloat64 OpType = 69 + OpStructPtrHeadFloat64 OpType = 70 + OpStructPtrHeadOmitEmptyFloat64 OpType = 71 + OpStructHeadBool OpType = 72 + OpStructHeadOmitEmptyBool OpType = 73 + OpStructPtrHeadBool OpType = 74 + OpStructPtrHeadOmitEmptyBool OpType = 75 + OpStructHeadString OpType = 76 + OpStructHeadOmitEmptyString OpType = 77 + OpStructPtrHeadString OpType = 78 + OpStructPtrHeadOmitEmptyString OpType = 79 + OpStructHeadBytes OpType = 80 + OpStructHeadOmitEmptyBytes OpType = 81 + OpStructPtrHeadBytes OpType = 82 + OpStructPtrHeadOmitEmptyBytes OpType = 83 + OpStructHeadNumber OpType = 84 + OpStructHeadOmitEmptyNumber OpType = 85 + OpStructPtrHeadNumber OpType = 86 + OpStructPtrHeadOmitEmptyNumber OpType = 87 + OpStructHeadArray OpType = 88 + OpStructHeadOmitEmptyArray OpType = 89 + OpStructPtrHeadArray OpType = 90 + OpStructPtrHeadOmitEmptyArray OpType = 91 + OpStructHeadMap OpType = 92 + OpStructHeadOmitEmptyMap OpType = 93 + OpStructPtrHeadMap OpType = 94 + OpStructPtrHeadOmitEmptyMap OpType = 95 + OpStructHeadSlice OpType = 96 + OpStructHeadOmitEmptySlice OpType = 97 + OpStructPtrHeadSlice OpType = 98 + OpStructPtrHeadOmitEmptySlice OpType = 99 + OpStructHeadStruct OpType = 100 + OpStructHeadOmitEmptyStruct OpType = 101 + OpStructPtrHeadStruct OpType = 102 + OpStructPtrHeadOmitEmptyStruct OpType = 103 + OpStructHeadMarshalJSON OpType = 104 + OpStructHeadOmitEmptyMarshalJSON OpType = 105 + OpStructPtrHeadMarshalJSON OpType = 106 + OpStructPtrHeadOmitEmptyMarshalJSON OpType = 107 + OpStructHeadMarshalText OpType = 108 + OpStructHeadOmitEmptyMarshalText OpType = 109 + OpStructPtrHeadMarshalText OpType = 110 + OpStructPtrHeadOmitEmptyMarshalText OpType = 111 + OpStructHeadIntString OpType = 112 + OpStructHeadOmitEmptyIntString OpType = 113 + OpStructPtrHeadIntString OpType = 114 + OpStructPtrHeadOmitEmptyIntString OpType = 115 + OpStructHeadUintString OpType = 116 + OpStructHeadOmitEmptyUintString OpType = 117 + OpStructPtrHeadUintString OpType = 118 + OpStructPtrHeadOmitEmptyUintString OpType = 119 + OpStructHeadFloat32String OpType = 120 + OpStructHeadOmitEmptyFloat32String OpType = 121 + OpStructPtrHeadFloat32String OpType = 122 + OpStructPtrHeadOmitEmptyFloat32String OpType = 123 + OpStructHeadFloat64String OpType = 124 + OpStructHeadOmitEmptyFloat64String OpType = 125 + OpStructPtrHeadFloat64String OpType = 126 + OpStructPtrHeadOmitEmptyFloat64String OpType = 127 + OpStructHeadBoolString OpType = 128 + OpStructHeadOmitEmptyBoolString OpType = 129 + OpStructPtrHeadBoolString OpType = 130 + OpStructPtrHeadOmitEmptyBoolString OpType = 131 + OpStructHeadStringString OpType = 132 + OpStructHeadOmitEmptyStringString OpType = 133 + OpStructPtrHeadStringString OpType = 134 + OpStructPtrHeadOmitEmptyStringString OpType = 135 + OpStructHeadNumberString OpType = 136 + OpStructHeadOmitEmptyNumberString OpType = 137 + OpStructPtrHeadNumberString OpType = 138 + OpStructPtrHeadOmitEmptyNumberString 
OpType = 139 + OpStructHeadIntPtr OpType = 140 + OpStructHeadOmitEmptyIntPtr OpType = 141 + OpStructPtrHeadIntPtr OpType = 142 + OpStructPtrHeadOmitEmptyIntPtr OpType = 143 + OpStructHeadUintPtr OpType = 144 + OpStructHeadOmitEmptyUintPtr OpType = 145 + OpStructPtrHeadUintPtr OpType = 146 + OpStructPtrHeadOmitEmptyUintPtr OpType = 147 + OpStructHeadFloat32Ptr OpType = 148 + OpStructHeadOmitEmptyFloat32Ptr OpType = 149 + OpStructPtrHeadFloat32Ptr OpType = 150 + OpStructPtrHeadOmitEmptyFloat32Ptr OpType = 151 + OpStructHeadFloat64Ptr OpType = 152 + OpStructHeadOmitEmptyFloat64Ptr OpType = 153 + OpStructPtrHeadFloat64Ptr OpType = 154 + OpStructPtrHeadOmitEmptyFloat64Ptr OpType = 155 + OpStructHeadBoolPtr OpType = 156 + OpStructHeadOmitEmptyBoolPtr OpType = 157 + OpStructPtrHeadBoolPtr OpType = 158 + OpStructPtrHeadOmitEmptyBoolPtr OpType = 159 + OpStructHeadStringPtr OpType = 160 + OpStructHeadOmitEmptyStringPtr OpType = 161 + OpStructPtrHeadStringPtr OpType = 162 + OpStructPtrHeadOmitEmptyStringPtr OpType = 163 + OpStructHeadBytesPtr OpType = 164 + OpStructHeadOmitEmptyBytesPtr OpType = 165 + OpStructPtrHeadBytesPtr OpType = 166 + OpStructPtrHeadOmitEmptyBytesPtr OpType = 167 + OpStructHeadNumberPtr OpType = 168 + OpStructHeadOmitEmptyNumberPtr OpType = 169 + OpStructPtrHeadNumberPtr OpType = 170 + OpStructPtrHeadOmitEmptyNumberPtr OpType = 171 + OpStructHeadArrayPtr OpType = 172 + OpStructHeadOmitEmptyArrayPtr OpType = 173 + OpStructPtrHeadArrayPtr OpType = 174 + OpStructPtrHeadOmitEmptyArrayPtr OpType = 175 + OpStructHeadMapPtr OpType = 176 + OpStructHeadOmitEmptyMapPtr OpType = 177 + OpStructPtrHeadMapPtr OpType = 178 + OpStructPtrHeadOmitEmptyMapPtr OpType = 179 + OpStructHeadSlicePtr OpType = 180 + OpStructHeadOmitEmptySlicePtr OpType = 181 + OpStructPtrHeadSlicePtr OpType = 182 + OpStructPtrHeadOmitEmptySlicePtr OpType = 183 + OpStructHeadMarshalJSONPtr OpType = 184 + OpStructHeadOmitEmptyMarshalJSONPtr OpType = 185 + OpStructPtrHeadMarshalJSONPtr OpType = 186 + OpStructPtrHeadOmitEmptyMarshalJSONPtr OpType = 187 + OpStructHeadMarshalTextPtr OpType = 188 + OpStructHeadOmitEmptyMarshalTextPtr OpType = 189 + OpStructPtrHeadMarshalTextPtr OpType = 190 + OpStructPtrHeadOmitEmptyMarshalTextPtr OpType = 191 + OpStructHeadInterfacePtr OpType = 192 + OpStructHeadOmitEmptyInterfacePtr OpType = 193 + OpStructPtrHeadInterfacePtr OpType = 194 + OpStructPtrHeadOmitEmptyInterfacePtr OpType = 195 + OpStructHeadIntPtrString OpType = 196 + OpStructHeadOmitEmptyIntPtrString OpType = 197 + OpStructPtrHeadIntPtrString OpType = 198 + OpStructPtrHeadOmitEmptyIntPtrString OpType = 199 + OpStructHeadUintPtrString OpType = 200 + OpStructHeadOmitEmptyUintPtrString OpType = 201 + OpStructPtrHeadUintPtrString OpType = 202 + OpStructPtrHeadOmitEmptyUintPtrString OpType = 203 + OpStructHeadFloat32PtrString OpType = 204 + OpStructHeadOmitEmptyFloat32PtrString OpType = 205 + OpStructPtrHeadFloat32PtrString OpType = 206 + OpStructPtrHeadOmitEmptyFloat32PtrString OpType = 207 + OpStructHeadFloat64PtrString OpType = 208 + OpStructHeadOmitEmptyFloat64PtrString OpType = 209 + OpStructPtrHeadFloat64PtrString OpType = 210 + OpStructPtrHeadOmitEmptyFloat64PtrString OpType = 211 + OpStructHeadBoolPtrString OpType = 212 + OpStructHeadOmitEmptyBoolPtrString OpType = 213 + OpStructPtrHeadBoolPtrString OpType = 214 + OpStructPtrHeadOmitEmptyBoolPtrString OpType = 215 + OpStructHeadStringPtrString OpType = 216 + OpStructHeadOmitEmptyStringPtrString OpType = 217 + OpStructPtrHeadStringPtrString OpType = 218 + 
OpStructPtrHeadOmitEmptyStringPtrString OpType = 219 + OpStructHeadNumberPtrString OpType = 220 + OpStructHeadOmitEmptyNumberPtrString OpType = 221 + OpStructPtrHeadNumberPtrString OpType = 222 + OpStructPtrHeadOmitEmptyNumberPtrString OpType = 223 + OpStructHead OpType = 224 + OpStructHeadOmitEmpty OpType = 225 + OpStructPtrHead OpType = 226 + OpStructPtrHeadOmitEmpty OpType = 227 + OpStructFieldInt OpType = 228 + OpStructFieldOmitEmptyInt OpType = 229 + OpStructEndInt OpType = 230 + OpStructEndOmitEmptyInt OpType = 231 + OpStructFieldUint OpType = 232 + OpStructFieldOmitEmptyUint OpType = 233 + OpStructEndUint OpType = 234 + OpStructEndOmitEmptyUint OpType = 235 + OpStructFieldFloat32 OpType = 236 + OpStructFieldOmitEmptyFloat32 OpType = 237 + OpStructEndFloat32 OpType = 238 + OpStructEndOmitEmptyFloat32 OpType = 239 + OpStructFieldFloat64 OpType = 240 + OpStructFieldOmitEmptyFloat64 OpType = 241 + OpStructEndFloat64 OpType = 242 + OpStructEndOmitEmptyFloat64 OpType = 243 + OpStructFieldBool OpType = 244 + OpStructFieldOmitEmptyBool OpType = 245 + OpStructEndBool OpType = 246 + OpStructEndOmitEmptyBool OpType = 247 + OpStructFieldString OpType = 248 + OpStructFieldOmitEmptyString OpType = 249 + OpStructEndString OpType = 250 + OpStructEndOmitEmptyString OpType = 251 + OpStructFieldBytes OpType = 252 + OpStructFieldOmitEmptyBytes OpType = 253 + OpStructEndBytes OpType = 254 + OpStructEndOmitEmptyBytes OpType = 255 + OpStructFieldNumber OpType = 256 + OpStructFieldOmitEmptyNumber OpType = 257 + OpStructEndNumber OpType = 258 + OpStructEndOmitEmptyNumber OpType = 259 + OpStructFieldArray OpType = 260 + OpStructFieldOmitEmptyArray OpType = 261 + OpStructEndArray OpType = 262 + OpStructEndOmitEmptyArray OpType = 263 + OpStructFieldMap OpType = 264 + OpStructFieldOmitEmptyMap OpType = 265 + OpStructEndMap OpType = 266 + OpStructEndOmitEmptyMap OpType = 267 + OpStructFieldSlice OpType = 268 + OpStructFieldOmitEmptySlice OpType = 269 + OpStructEndSlice OpType = 270 + OpStructEndOmitEmptySlice OpType = 271 + OpStructFieldStruct OpType = 272 + OpStructFieldOmitEmptyStruct OpType = 273 + OpStructEndStruct OpType = 274 + OpStructEndOmitEmptyStruct OpType = 275 + OpStructFieldMarshalJSON OpType = 276 + OpStructFieldOmitEmptyMarshalJSON OpType = 277 + OpStructEndMarshalJSON OpType = 278 + OpStructEndOmitEmptyMarshalJSON OpType = 279 + OpStructFieldMarshalText OpType = 280 + OpStructFieldOmitEmptyMarshalText OpType = 281 + OpStructEndMarshalText OpType = 282 + OpStructEndOmitEmptyMarshalText OpType = 283 + OpStructFieldIntString OpType = 284 + OpStructFieldOmitEmptyIntString OpType = 285 + OpStructEndIntString OpType = 286 + OpStructEndOmitEmptyIntString OpType = 287 + OpStructFieldUintString OpType = 288 + OpStructFieldOmitEmptyUintString OpType = 289 + OpStructEndUintString OpType = 290 + OpStructEndOmitEmptyUintString OpType = 291 + OpStructFieldFloat32String OpType = 292 + OpStructFieldOmitEmptyFloat32String OpType = 293 + OpStructEndFloat32String OpType = 294 + OpStructEndOmitEmptyFloat32String OpType = 295 + OpStructFieldFloat64String OpType = 296 + OpStructFieldOmitEmptyFloat64String OpType = 297 + OpStructEndFloat64String OpType = 298 + OpStructEndOmitEmptyFloat64String OpType = 299 + OpStructFieldBoolString OpType = 300 + OpStructFieldOmitEmptyBoolString OpType = 301 + OpStructEndBoolString OpType = 302 + OpStructEndOmitEmptyBoolString OpType = 303 + OpStructFieldStringString OpType = 304 + OpStructFieldOmitEmptyStringString OpType = 305 + OpStructEndStringString OpType = 306 + 
OpStructEndOmitEmptyStringString OpType = 307 + OpStructFieldNumberString OpType = 308 + OpStructFieldOmitEmptyNumberString OpType = 309 + OpStructEndNumberString OpType = 310 + OpStructEndOmitEmptyNumberString OpType = 311 + OpStructFieldIntPtr OpType = 312 + OpStructFieldOmitEmptyIntPtr OpType = 313 + OpStructEndIntPtr OpType = 314 + OpStructEndOmitEmptyIntPtr OpType = 315 + OpStructFieldUintPtr OpType = 316 + OpStructFieldOmitEmptyUintPtr OpType = 317 + OpStructEndUintPtr OpType = 318 + OpStructEndOmitEmptyUintPtr OpType = 319 + OpStructFieldFloat32Ptr OpType = 320 + OpStructFieldOmitEmptyFloat32Ptr OpType = 321 + OpStructEndFloat32Ptr OpType = 322 + OpStructEndOmitEmptyFloat32Ptr OpType = 323 + OpStructFieldFloat64Ptr OpType = 324 + OpStructFieldOmitEmptyFloat64Ptr OpType = 325 + OpStructEndFloat64Ptr OpType = 326 + OpStructEndOmitEmptyFloat64Ptr OpType = 327 + OpStructFieldBoolPtr OpType = 328 + OpStructFieldOmitEmptyBoolPtr OpType = 329 + OpStructEndBoolPtr OpType = 330 + OpStructEndOmitEmptyBoolPtr OpType = 331 + OpStructFieldStringPtr OpType = 332 + OpStructFieldOmitEmptyStringPtr OpType = 333 + OpStructEndStringPtr OpType = 334 + OpStructEndOmitEmptyStringPtr OpType = 335 + OpStructFieldBytesPtr OpType = 336 + OpStructFieldOmitEmptyBytesPtr OpType = 337 + OpStructEndBytesPtr OpType = 338 + OpStructEndOmitEmptyBytesPtr OpType = 339 + OpStructFieldNumberPtr OpType = 340 + OpStructFieldOmitEmptyNumberPtr OpType = 341 + OpStructEndNumberPtr OpType = 342 + OpStructEndOmitEmptyNumberPtr OpType = 343 + OpStructFieldArrayPtr OpType = 344 + OpStructFieldOmitEmptyArrayPtr OpType = 345 + OpStructEndArrayPtr OpType = 346 + OpStructEndOmitEmptyArrayPtr OpType = 347 + OpStructFieldMapPtr OpType = 348 + OpStructFieldOmitEmptyMapPtr OpType = 349 + OpStructEndMapPtr OpType = 350 + OpStructEndOmitEmptyMapPtr OpType = 351 + OpStructFieldSlicePtr OpType = 352 + OpStructFieldOmitEmptySlicePtr OpType = 353 + OpStructEndSlicePtr OpType = 354 + OpStructEndOmitEmptySlicePtr OpType = 355 + OpStructFieldMarshalJSONPtr OpType = 356 + OpStructFieldOmitEmptyMarshalJSONPtr OpType = 357 + OpStructEndMarshalJSONPtr OpType = 358 + OpStructEndOmitEmptyMarshalJSONPtr OpType = 359 + OpStructFieldMarshalTextPtr OpType = 360 + OpStructFieldOmitEmptyMarshalTextPtr OpType = 361 + OpStructEndMarshalTextPtr OpType = 362 + OpStructEndOmitEmptyMarshalTextPtr OpType = 363 + OpStructFieldInterfacePtr OpType = 364 + OpStructFieldOmitEmptyInterfacePtr OpType = 365 + OpStructEndInterfacePtr OpType = 366 + OpStructEndOmitEmptyInterfacePtr OpType = 367 + OpStructFieldIntPtrString OpType = 368 + OpStructFieldOmitEmptyIntPtrString OpType = 369 + OpStructEndIntPtrString OpType = 370 + OpStructEndOmitEmptyIntPtrString OpType = 371 + OpStructFieldUintPtrString OpType = 372 + OpStructFieldOmitEmptyUintPtrString OpType = 373 + OpStructEndUintPtrString OpType = 374 + OpStructEndOmitEmptyUintPtrString OpType = 375 + OpStructFieldFloat32PtrString OpType = 376 + OpStructFieldOmitEmptyFloat32PtrString OpType = 377 + OpStructEndFloat32PtrString OpType = 378 + OpStructEndOmitEmptyFloat32PtrString OpType = 379 + OpStructFieldFloat64PtrString OpType = 380 + OpStructFieldOmitEmptyFloat64PtrString OpType = 381 + OpStructEndFloat64PtrString OpType = 382 + OpStructEndOmitEmptyFloat64PtrString OpType = 383 + OpStructFieldBoolPtrString OpType = 384 + OpStructFieldOmitEmptyBoolPtrString OpType = 385 + OpStructEndBoolPtrString OpType = 386 + OpStructEndOmitEmptyBoolPtrString OpType = 387 + OpStructFieldStringPtrString OpType = 388 + 
OpStructFieldOmitEmptyStringPtrString OpType = 389 + OpStructEndStringPtrString OpType = 390 + OpStructEndOmitEmptyStringPtrString OpType = 391 + OpStructFieldNumberPtrString OpType = 392 + OpStructFieldOmitEmptyNumberPtrString OpType = 393 + OpStructEndNumberPtrString OpType = 394 + OpStructEndOmitEmptyNumberPtrString OpType = 395 + OpStructField OpType = 396 + OpStructFieldOmitEmpty OpType = 397 + OpStructEnd OpType = 398 + OpStructEndOmitEmpty OpType = 399 +) + +func (t OpType) String() string { + if int(t) >= 400 { + return "" + } + return opTypeStrings[int(t)] +} + +func (t OpType) CodeType() CodeType { + if strings.Contains(t.String(), "Struct") { + if strings.Contains(t.String(), "End") { + return CodeStructEnd + } + return CodeStructField + } + switch t { + case OpArray, OpArrayPtr: + return CodeArrayHead + case OpArrayElem: + return CodeArrayElem + case OpSlice, OpSlicePtr: + return CodeSliceHead + case OpSliceElem: + return CodeSliceElem + case OpMap, OpMapPtr: + return CodeMapHead + case OpMapKey: + return CodeMapKey + case OpMapValue: + return CodeMapValue + case OpMapEnd: + return CodeMapEnd + } + + return CodeOp +} + +func (t OpType) HeadToPtrHead() OpType { + if strings.Index(t.String(), "PtrHead") > 0 { + return t + } + + idx := strings.Index(t.String(), "Head") + if idx == -1 { + return t + } + suffix := "PtrHead" + t.String()[idx+len("Head"):] + + const toPtrOffset = 2 + if strings.Contains(OpType(int(t)+toPtrOffset).String(), suffix) { + return OpType(int(t) + toPtrOffset) + } + return t +} + +func (t OpType) HeadToOmitEmptyHead() OpType { + const toOmitEmptyOffset = 1 + if strings.Contains(OpType(int(t)+toOmitEmptyOffset).String(), "OmitEmpty") { + return OpType(int(t) + toOmitEmptyOffset) + } + + return t +} + +func (t OpType) PtrHeadToHead() OpType { + idx := strings.Index(t.String(), "PtrHead") + if idx == -1 { + return t + } + suffix := t.String()[idx+len("Ptr"):] + + const toPtrOffset = 2 + if strings.Contains(OpType(int(t)-toPtrOffset).String(), suffix) { + return OpType(int(t) - toPtrOffset) + } + return t +} + +func (t OpType) FieldToEnd() OpType { + idx := strings.Index(t.String(), "Field") + if idx == -1 { + return t + } + suffix := t.String()[idx+len("Field"):] + if suffix == "" || suffix == "OmitEmpty" { + return t + } + const toEndOffset = 2 + if strings.Contains(OpType(int(t)+toEndOffset).String(), "End"+suffix) { + return OpType(int(t) + toEndOffset) + } + return t +} + +func (t OpType) FieldToOmitEmptyField() OpType { + const toOmitEmptyOffset = 1 + if strings.Contains(OpType(int(t)+toOmitEmptyOffset).String(), "OmitEmpty") { + return OpType(int(t) + toOmitEmptyOffset) + } + return t +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/query.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/query.go new file mode 100644 index 00000000..1e1850cc --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/query.go @@ -0,0 +1,135 @@ +package encoder + +import ( + "context" + "fmt" + "reflect" +) + +var ( + Marshal func(interface{}) ([]byte, error) + Unmarshal func([]byte, interface{}) error +) + +type FieldQuery struct { + Name string + Fields []*FieldQuery + hash string +} + +func (q *FieldQuery) Hash() string { + if q.hash != "" { + return q.hash + } + b, _ := Marshal(q) + q.hash = string(b) + return q.hash +} + +func (q *FieldQuery) MarshalJSON() ([]byte, error) { + if q.Name != "" { + if len(q.Fields) > 0 { + return Marshal(map[string][]*FieldQuery{q.Name: q.Fields}) 
+ } + return Marshal(q.Name) + } + return Marshal(q.Fields) +} + +func (q *FieldQuery) QueryString() (FieldQueryString, error) { + b, err := Marshal(q) + if err != nil { + return "", err + } + return FieldQueryString(b), nil +} + +type FieldQueryString string + +func (s FieldQueryString) Build() (*FieldQuery, error) { + var query interface{} + if err := Unmarshal([]byte(s), &query); err != nil { + return nil, err + } + return s.build(reflect.ValueOf(query)) +} + +func (s FieldQueryString) build(v reflect.Value) (*FieldQuery, error) { + switch v.Type().Kind() { + case reflect.String: + return s.buildString(v) + case reflect.Map: + return s.buildMap(v) + case reflect.Slice: + return s.buildSlice(v) + case reflect.Interface: + return s.build(reflect.ValueOf(v.Interface())) + } + return nil, fmt.Errorf("failed to build field query") +} + +func (s FieldQueryString) buildString(v reflect.Value) (*FieldQuery, error) { + b := []byte(v.String()) + switch b[0] { + case '[', '{': + var query interface{} + if err := Unmarshal(b, &query); err != nil { + return nil, err + } + if str, ok := query.(string); ok { + return &FieldQuery{Name: str}, nil + } + return s.build(reflect.ValueOf(query)) + } + return &FieldQuery{Name: string(b)}, nil +} + +func (s FieldQueryString) buildSlice(v reflect.Value) (*FieldQuery, error) { + fields := make([]*FieldQuery, 0, v.Len()) + for i := 0; i < v.Len(); i++ { + def, err := s.build(v.Index(i)) + if err != nil { + return nil, err + } + fields = append(fields, def) + } + return &FieldQuery{Fields: fields}, nil +} + +func (s FieldQueryString) buildMap(v reflect.Value) (*FieldQuery, error) { + keys := v.MapKeys() + if len(keys) != 1 { + return nil, fmt.Errorf("failed to build field query object") + } + key := keys[0] + if key.Type().Kind() != reflect.String { + return nil, fmt.Errorf("failed to build field query. 
invalid object key type") + } + name := key.String() + def, err := s.build(v.MapIndex(key)) + if err != nil { + return nil, err + } + return &FieldQuery{ + Name: name, + Fields: def.Fields, + }, nil +} + +type queryKey struct{} + +func FieldQueryFromContext(ctx context.Context) *FieldQuery { + query := ctx.Value(queryKey{}) + if query == nil { + return nil + } + q, ok := query.(*FieldQuery) + if !ok { + return nil + } + return q +} + +func SetFieldQueryToContext(ctx context.Context, query *FieldQuery) context.Context { + return context.WithValue(ctx, queryKey{}, query) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/string.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/string.go new file mode 100644 index 00000000..e4152b27 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/string.go @@ -0,0 +1,459 @@ +package encoder + +import ( + "math/bits" + "reflect" + "unsafe" +) + +const ( + lsb = 0x0101010101010101 + msb = 0x8080808080808080 +) + +var hex = "0123456789abcdef" + +//nolint:govet +func stringToUint64Slice(s string) []uint64 { + return *(*[]uint64)(unsafe.Pointer(&reflect.SliceHeader{ + Data: ((*reflect.StringHeader)(unsafe.Pointer(&s))).Data, + Len: len(s) / 8, + Cap: len(s) / 8, + })) +} + +func AppendString(ctx *RuntimeContext, buf []byte, s string) []byte { + if ctx.Option.Flag&HTMLEscapeOption != 0 { + if ctx.Option.Flag&NormalizeUTF8Option != 0 { + return appendNormalizedHTMLString(buf, s) + } + return appendHTMLString(buf, s) + } + if ctx.Option.Flag&NormalizeUTF8Option != 0 { + return appendNormalizedString(buf, s) + } + return appendString(buf, s) +} + +func appendNormalizedHTMLString(buf []byte, s string) []byte { + valLen := len(s) + if valLen == 0 { + return append(buf, `""`...) + } + buf = append(buf, '"') + var ( + i, j int + ) + if valLen >= 8 { + chunks := stringToUint64Slice(s) + for _, n := range chunks { + // combine masks before checking for the MSB of each byte. We include + // `n` in the mask to check whether any of the *input* byte MSBs were + // set (i.e. the byte was outside the ASCII range). + mask := n | (n - (lsb * 0x20)) | + ((n ^ (lsb * '"')) - lsb) | + ((n ^ (lsb * '\\')) - lsb) | + ((n ^ (lsb * '<')) - lsb) | + ((n ^ (lsb * '>')) - lsb) | + ((n ^ (lsb * '&')) - lsb) + if (mask & msb) != 0 { + j = bits.TrailingZeros64(mask&msb) / 8 + goto ESCAPE_END + } + } + for i := len(chunks) * 8; i < valLen; i++ { + if needEscapeHTMLNormalizeUTF8[s[i]] { + j = i + goto ESCAPE_END + } + } + // no found any escape characters. + return append(append(buf, s...), '"') + } +ESCAPE_END: + for j < valLen { + c := s[j] + + if !needEscapeHTMLNormalizeUTF8[c] { + // fast path: most of the time, printable ascii characters are used + j++ + continue + } + + switch c { + case '\\', '"': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', c) + i = j + 1 + j = j + 1 + continue + + case '\n': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'n') + i = j + 1 + j = j + 1 + continue + + case '\r': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'r') + i = j + 1 + j = j + 1 + continue + + case '\t': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 't') + i = j + 1 + j = j + 1 + continue + + case '<', '>', '&': + buf = append(buf, s[i:j]...) + buf = append(buf, `\u00`...) 
+ buf = append(buf, hex[c>>4], hex[c&0xF]) + i = j + 1 + j = j + 1 + continue + + case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F + buf = append(buf, s[i:j]...) + buf = append(buf, `\u00`...) + buf = append(buf, hex[c>>4], hex[c&0xF]) + i = j + 1 + j = j + 1 + continue + } + state, size := decodeRuneInString(s[j:]) + switch state { + case runeErrorState: + buf = append(buf, s[i:j]...) + buf = append(buf, `\ufffd`...) + i = j + 1 + j = j + 1 + continue + // U+2028 is LINE SEPARATOR. + // U+2029 is PARAGRAPH SEPARATOR. + // They are both technically valid characters in JSON strings, + // but don't work in JSONP, which has to be evaluated as JavaScript, + // and can lead to security holes there. It is valid JSON to + // escape them, so we do so unconditionally. + // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion. + case lineSepState: + buf = append(buf, s[i:j]...) + buf = append(buf, `\u2028`...) + i = j + 3 + j = j + 3 + continue + case paragraphSepState: + buf = append(buf, s[i:j]...) + buf = append(buf, `\u2029`...) + i = j + 3 + j = j + 3 + continue + } + j += size + } + + return append(append(buf, s[i:]...), '"') +} + +func appendHTMLString(buf []byte, s string) []byte { + valLen := len(s) + if valLen == 0 { + return append(buf, `""`...) + } + buf = append(buf, '"') + var ( + i, j int + ) + if valLen >= 8 { + chunks := stringToUint64Slice(s) + for _, n := range chunks { + // combine masks before checking for the MSB of each byte. We include + // `n` in the mask to check whether any of the *input* byte MSBs were + // set (i.e. the byte was outside the ASCII range). + mask := n | (n - (lsb * 0x20)) | + ((n ^ (lsb * '"')) - lsb) | + ((n ^ (lsb * '\\')) - lsb) | + ((n ^ (lsb * '<')) - lsb) | + ((n ^ (lsb * '>')) - lsb) | + ((n ^ (lsb * '&')) - lsb) + if (mask & msb) != 0 { + j = bits.TrailingZeros64(mask&msb) / 8 + goto ESCAPE_END + } + } + for i := len(chunks) * 8; i < valLen; i++ { + if needEscapeHTML[s[i]] { + j = i + goto ESCAPE_END + } + } + // no found any escape characters. + return append(append(buf, s...), '"') + } +ESCAPE_END: + for j < valLen { + c := s[j] + + if !needEscapeHTML[c] { + // fast path: most of the time, printable ascii characters are used + j++ + continue + } + + switch c { + case '\\', '"': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', c) + i = j + 1 + j = j + 1 + continue + + case '\n': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'n') + i = j + 1 + j = j + 1 + continue + + case '\r': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'r') + i = j + 1 + j = j + 1 + continue + + case '\t': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 't') + i = j + 1 + j = j + 1 + continue + + case '<', '>', '&': + buf = append(buf, s[i:j]...) + buf = append(buf, `\u00`...) + buf = append(buf, hex[c>>4], hex[c&0xF]) + i = j + 1 + j = j + 1 + continue + + case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F + buf = append(buf, s[i:j]...) + buf = append(buf, `\u00`...) 
+ buf = append(buf, hex[c>>4], hex[c&0xF]) + i = j + 1 + j = j + 1 + continue + } + j++ + } + + return append(append(buf, s[i:]...), '"') +} + +func appendNormalizedString(buf []byte, s string) []byte { + valLen := len(s) + if valLen == 0 { + return append(buf, `""`...) + } + buf = append(buf, '"') + var ( + i, j int + ) + if valLen >= 8 { + chunks := stringToUint64Slice(s) + for _, n := range chunks { + // combine masks before checking for the MSB of each byte. We include + // `n` in the mask to check whether any of the *input* byte MSBs were + // set (i.e. the byte was outside the ASCII range). + mask := n | (n - (lsb * 0x20)) | + ((n ^ (lsb * '"')) - lsb) | + ((n ^ (lsb * '\\')) - lsb) + if (mask & msb) != 0 { + j = bits.TrailingZeros64(mask&msb) / 8 + goto ESCAPE_END + } + } + valLen := len(s) + for i := len(chunks) * 8; i < valLen; i++ { + if needEscapeNormalizeUTF8[s[i]] { + j = i + goto ESCAPE_END + } + } + return append(append(buf, s...), '"') + } +ESCAPE_END: + for j < valLen { + c := s[j] + + if !needEscapeNormalizeUTF8[c] { + // fast path: most of the time, printable ascii characters are used + j++ + continue + } + + switch c { + case '\\', '"': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', c) + i = j + 1 + j = j + 1 + continue + + case '\n': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'n') + i = j + 1 + j = j + 1 + continue + + case '\r': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'r') + i = j + 1 + j = j + 1 + continue + + case '\t': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 't') + i = j + 1 + j = j + 1 + continue + + case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F + buf = append(buf, s[i:j]...) + buf = append(buf, `\u00`...) + buf = append(buf, hex[c>>4], hex[c&0xF]) + i = j + 1 + j = j + 1 + continue + } + + state, size := decodeRuneInString(s[j:]) + switch state { + case runeErrorState: + buf = append(buf, s[i:j]...) + buf = append(buf, `\ufffd`...) + i = j + 1 + j = j + 1 + continue + // U+2028 is LINE SEPARATOR. + // U+2029 is PARAGRAPH SEPARATOR. + // They are both technically valid characters in JSON strings, + // but don't work in JSONP, which has to be evaluated as JavaScript, + // and can lead to security holes there. It is valid JSON to + // escape them, so we do so unconditionally. + // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion. + case lineSepState: + buf = append(buf, s[i:j]...) + buf = append(buf, `\u2028`...) + i = j + 3 + j = j + 3 + continue + case paragraphSepState: + buf = append(buf, s[i:j]...) + buf = append(buf, `\u2029`...) + i = j + 3 + j = j + 3 + continue + } + j += size + } + + return append(append(buf, s[i:]...), '"') +} + +func appendString(buf []byte, s string) []byte { + valLen := len(s) + if valLen == 0 { + return append(buf, `""`...) + } + buf = append(buf, '"') + var ( + i, j int + ) + if valLen >= 8 { + chunks := stringToUint64Slice(s) + for _, n := range chunks { + // combine masks before checking for the MSB of each byte. We include + // `n` in the mask to check whether any of the *input* byte MSBs were + // set (i.e. the byte was outside the ASCII range). 
+ mask := n | (n - (lsb * 0x20)) | + ((n ^ (lsb * '"')) - lsb) | + ((n ^ (lsb * '\\')) - lsb) + if (mask & msb) != 0 { + j = bits.TrailingZeros64(mask&msb) / 8 + goto ESCAPE_END + } + } + valLen := len(s) + for i := len(chunks) * 8; i < valLen; i++ { + if needEscape[s[i]] { + j = i + goto ESCAPE_END + } + } + return append(append(buf, s...), '"') + } +ESCAPE_END: + for j < valLen { + c := s[j] + + if !needEscape[c] { + // fast path: most of the time, printable ascii characters are used + j++ + continue + } + + switch c { + case '\\', '"': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', c) + i = j + 1 + j = j + 1 + continue + + case '\n': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'n') + i = j + 1 + j = j + 1 + continue + + case '\r': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 'r') + i = j + 1 + j = j + 1 + continue + + case '\t': + buf = append(buf, s[i:j]...) + buf = append(buf, '\\', 't') + i = j + 1 + j = j + 1 + continue + + case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F + buf = append(buf, s[i:j]...) + buf = append(buf, `\u00`...) + buf = append(buf, hex[c>>4], hex[c&0xF]) + i = j + 1 + j = j + 1 + continue + } + j++ + } + + return append(append(buf, s[i:]...), '"') +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/string_table.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/string_table.go new file mode 100644 index 00000000..ebe42c92 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/string_table.go @@ -0,0 +1,415 @@ +package encoder + +var needEscapeHTMLNormalizeUTF8 = [256]bool{ + '"': true, + '&': true, + '<': true, + '>': true, + '\\': true, + 0x00: true, + 0x01: true, + 0x02: true, + 0x03: true, + 0x04: true, + 0x05: true, + 0x06: true, + 0x07: true, + 0x08: true, + 0x09: true, + 0x0a: true, + 0x0b: true, + 0x0c: true, + 0x0d: true, + 0x0e: true, + 0x0f: true, + 0x10: true, + 0x11: true, + 0x12: true, + 0x13: true, + 0x14: true, + 0x15: true, + 0x16: true, + 0x17: true, + 0x18: true, + 0x19: true, + 0x1a: true, + 0x1b: true, + 0x1c: true, + 0x1d: true, + 0x1e: true, + 0x1f: true, + /* 0x20 - 0x7f */ + 0x80: true, + 0x81: true, + 0x82: true, + 0x83: true, + 0x84: true, + 0x85: true, + 0x86: true, + 0x87: true, + 0x88: true, + 0x89: true, + 0x8a: true, + 0x8b: true, + 0x8c: true, + 0x8d: true, + 0x8e: true, + 0x8f: true, + 0x90: true, + 0x91: true, + 0x92: true, + 0x93: true, + 0x94: true, + 0x95: true, + 0x96: true, + 0x97: true, + 0x98: true, + 0x99: true, + 0x9a: true, + 0x9b: true, + 0x9c: true, + 0x9d: true, + 0x9e: true, + 0x9f: true, + 0xa0: true, + 0xa1: true, + 0xa2: true, + 0xa3: true, + 0xa4: true, + 0xa5: true, + 0xa6: true, + 0xa7: true, + 0xa8: true, + 0xa9: true, + 0xaa: true, + 0xab: true, + 0xac: true, + 0xad: true, + 0xae: true, + 0xaf: true, + 0xb0: true, + 0xb1: true, + 0xb2: true, + 0xb3: true, + 0xb4: true, + 0xb5: true, + 0xb6: true, + 0xb7: true, + 0xb8: true, + 0xb9: true, + 0xba: true, + 0xbb: true, + 0xbc: true, + 0xbd: true, + 0xbe: true, + 0xbf: true, + 0xc0: true, + 0xc1: true, + 0xc2: true, + 0xc3: true, + 0xc4: true, + 0xc5: true, + 0xc6: true, + 0xc7: true, + 0xc8: true, + 0xc9: true, + 0xca: true, + 0xcb: true, + 0xcc: true, + 0xcd: true, + 0xce: true, + 0xcf: true, + 0xd0: true, + 0xd1: true, + 0xd2: true, + 0xd3: true, + 0xd4: true, + 0xd5: true, + 
0xd6: true, + 0xd7: true, + 0xd8: true, + 0xd9: true, + 0xda: true, + 0xdb: true, + 0xdc: true, + 0xdd: true, + 0xde: true, + 0xdf: true, + 0xe0: true, + 0xe1: true, + 0xe2: true, + 0xe3: true, + 0xe4: true, + 0xe5: true, + 0xe6: true, + 0xe7: true, + 0xe8: true, + 0xe9: true, + 0xea: true, + 0xeb: true, + 0xec: true, + 0xed: true, + 0xee: true, + 0xef: true, + 0xf0: true, + 0xf1: true, + 0xf2: true, + 0xf3: true, + 0xf4: true, + 0xf5: true, + 0xf6: true, + 0xf7: true, + 0xf8: true, + 0xf9: true, + 0xfa: true, + 0xfb: true, + 0xfc: true, + 0xfd: true, + 0xfe: true, + 0xff: true, +} + +var needEscapeNormalizeUTF8 = [256]bool{ + '"': true, + '\\': true, + 0x00: true, + 0x01: true, + 0x02: true, + 0x03: true, + 0x04: true, + 0x05: true, + 0x06: true, + 0x07: true, + 0x08: true, + 0x09: true, + 0x0a: true, + 0x0b: true, + 0x0c: true, + 0x0d: true, + 0x0e: true, + 0x0f: true, + 0x10: true, + 0x11: true, + 0x12: true, + 0x13: true, + 0x14: true, + 0x15: true, + 0x16: true, + 0x17: true, + 0x18: true, + 0x19: true, + 0x1a: true, + 0x1b: true, + 0x1c: true, + 0x1d: true, + 0x1e: true, + 0x1f: true, + /* 0x20 - 0x7f */ + 0x80: true, + 0x81: true, + 0x82: true, + 0x83: true, + 0x84: true, + 0x85: true, + 0x86: true, + 0x87: true, + 0x88: true, + 0x89: true, + 0x8a: true, + 0x8b: true, + 0x8c: true, + 0x8d: true, + 0x8e: true, + 0x8f: true, + 0x90: true, + 0x91: true, + 0x92: true, + 0x93: true, + 0x94: true, + 0x95: true, + 0x96: true, + 0x97: true, + 0x98: true, + 0x99: true, + 0x9a: true, + 0x9b: true, + 0x9c: true, + 0x9d: true, + 0x9e: true, + 0x9f: true, + 0xa0: true, + 0xa1: true, + 0xa2: true, + 0xa3: true, + 0xa4: true, + 0xa5: true, + 0xa6: true, + 0xa7: true, + 0xa8: true, + 0xa9: true, + 0xaa: true, + 0xab: true, + 0xac: true, + 0xad: true, + 0xae: true, + 0xaf: true, + 0xb0: true, + 0xb1: true, + 0xb2: true, + 0xb3: true, + 0xb4: true, + 0xb5: true, + 0xb6: true, + 0xb7: true, + 0xb8: true, + 0xb9: true, + 0xba: true, + 0xbb: true, + 0xbc: true, + 0xbd: true, + 0xbe: true, + 0xbf: true, + 0xc0: true, + 0xc1: true, + 0xc2: true, + 0xc3: true, + 0xc4: true, + 0xc5: true, + 0xc6: true, + 0xc7: true, + 0xc8: true, + 0xc9: true, + 0xca: true, + 0xcb: true, + 0xcc: true, + 0xcd: true, + 0xce: true, + 0xcf: true, + 0xd0: true, + 0xd1: true, + 0xd2: true, + 0xd3: true, + 0xd4: true, + 0xd5: true, + 0xd6: true, + 0xd7: true, + 0xd8: true, + 0xd9: true, + 0xda: true, + 0xdb: true, + 0xdc: true, + 0xdd: true, + 0xde: true, + 0xdf: true, + 0xe0: true, + 0xe1: true, + 0xe2: true, + 0xe3: true, + 0xe4: true, + 0xe5: true, + 0xe6: true, + 0xe7: true, + 0xe8: true, + 0xe9: true, + 0xea: true, + 0xeb: true, + 0xec: true, + 0xed: true, + 0xee: true, + 0xef: true, + 0xf0: true, + 0xf1: true, + 0xf2: true, + 0xf3: true, + 0xf4: true, + 0xf5: true, + 0xf6: true, + 0xf7: true, + 0xf8: true, + 0xf9: true, + 0xfa: true, + 0xfb: true, + 0xfc: true, + 0xfd: true, + 0xfe: true, + 0xff: true, +} + +var needEscapeHTML = [256]bool{ + '"': true, + '&': true, + '<': true, + '>': true, + '\\': true, + 0x00: true, + 0x01: true, + 0x02: true, + 0x03: true, + 0x04: true, + 0x05: true, + 0x06: true, + 0x07: true, + 0x08: true, + 0x09: true, + 0x0a: true, + 0x0b: true, + 0x0c: true, + 0x0d: true, + 0x0e: true, + 0x0f: true, + 0x10: true, + 0x11: true, + 0x12: true, + 0x13: true, + 0x14: true, + 0x15: true, + 0x16: true, + 0x17: true, + 0x18: true, + 0x19: true, + 0x1a: true, + 0x1b: true, + 0x1c: true, + 0x1d: true, + 0x1e: true, + 0x1f: true, + /* 0x20 - 0xff */ +} + +var needEscape = [256]bool{ + '"': true, + '\\': true, + 
0x00: true, + 0x01: true, + 0x02: true, + 0x03: true, + 0x04: true, + 0x05: true, + 0x06: true, + 0x07: true, + 0x08: true, + 0x09: true, + 0x0a: true, + 0x0b: true, + 0x0c: true, + 0x0d: true, + 0x0e: true, + 0x0f: true, + 0x10: true, + 0x11: true, + 0x12: true, + 0x13: true, + 0x14: true, + 0x15: true, + 0x16: true, + 0x17: true, + 0x18: true, + 0x19: true, + 0x1a: true, + 0x1b: true, + 0x1c: true, + 0x1d: true, + 0x1e: true, + 0x1f: true, + /* 0x20 - 0xff */ +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/debug_vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/debug_vm.go new file mode 100644 index 00000000..fbbc0de4 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/debug_vm.go @@ -0,0 +1,35 @@ +package vm + +import ( + "fmt" + + "github.com/goccy/go-json/internal/encoder" +) + +func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + defer func() { + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + if err := recover(); err != nil { + w := ctx.Option.DebugOut + fmt.Fprintln(w, "=============[DEBUG]===============") + fmt.Fprintln(w, "* [TYPE]") + fmt.Fprintln(w, codeSet.Type) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [ALL OPCODE]") + fmt.Fprintln(w, code.Dump()) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [CONTEXT]") + fmt.Fprintf(w, "%+v\n", ctx) + fmt.Fprintln(w, "===================================") + panic(err) + } + }() + + return Run(ctx, b, codeSet) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/hack.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/hack.go new file mode 100644 index 00000000..65252b4a --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/hack.go @@ -0,0 +1,9 @@ +package vm + +import ( + // HACK: compile order + // `vm`, `vm_indent`, `vm_color`, `vm_color_indent` packages uses a lot of memory to compile, + // so forcibly make dependencies and avoid compiling in concurrent. 
+ // dependency order: vm => vm_indent => vm_color => vm_color_indent + _ "github.com/goccy/go-json/internal/encoder/vm_indent" +) diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/util.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/util.go new file mode 100644 index 00000000..86291d7b --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/util.go @@ -0,0 +1,207 @@ +package vm + +import ( + "encoding/json" + "fmt" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +const uintptrSize = 4 << (^uintptr(0) >> 63) + +var ( + appendInt = encoder.AppendInt + appendUint = encoder.AppendUint + appendFloat32 = encoder.AppendFloat32 + appendFloat64 = encoder.AppendFloat64 + appendString = encoder.AppendString + appendByteSlice = encoder.AppendByteSlice + appendNumber = encoder.AppendNumber + errUnsupportedValue = encoder.ErrUnsupportedValue + errUnsupportedFloat = encoder.ErrUnsupportedFloat + mapiterinit = encoder.MapIterInit + mapiterkey = encoder.MapIterKey + mapitervalue = encoder.MapIterValue + mapiternext = encoder.MapIterNext + maplen = encoder.MapLen +) + +type emptyInterface struct { + typ *runtime.Type + ptr unsafe.Pointer +} + +type nonEmptyInterface struct { + itab *struct { + ityp *runtime.Type // static interface type + typ *runtime.Type // dynamic concrete type + // unused fields... + } + ptr unsafe.Pointer +} + +func errUnimplementedOp(op encoder.OpType) error { + return fmt.Errorf("encoder: opcode %s has not been implemented", op) +} + +func load(base uintptr, idx uint32) uintptr { + addr := base + uintptr(idx) + return **(**uintptr)(unsafe.Pointer(&addr)) +} + +func store(base uintptr, idx uint32, p uintptr) { + addr := base + uintptr(idx) + **(**uintptr)(unsafe.Pointer(&addr)) = p +} + +func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr { + addr := base + uintptr(idx) + p := **(**uintptr)(unsafe.Pointer(&addr)) + for i := uint8(0); i < ptrNum; i++ { + if p == 0 { + return 0 + } + p = ptrToPtr(p) + } + return p +} + +func ptrToUint64(p uintptr, bitSize uint8) uint64 { + switch bitSize { + case 8: + return (uint64)(**(**uint8)(unsafe.Pointer(&p))) + case 16: + return (uint64)(**(**uint16)(unsafe.Pointer(&p))) + case 32: + return (uint64)(**(**uint32)(unsafe.Pointer(&p))) + case 64: + return **(**uint64)(unsafe.Pointer(&p)) + } + return 0 +} +func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) } +func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) } +func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) } +func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) } +func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) } +func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) } +func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) } +func ptrToPtr(p uintptr) uintptr { + return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p))) +} +func ptrToNPtr(p uintptr, ptrNum uint8) uintptr { + for i := uint8(0); i < ptrNum; i++ { + if p == 0 { + return 0 + } + p = ptrToPtr(p) + } + return p +} + +func ptrToUnsafePtr(p uintptr) unsafe.Pointer { + return *(*unsafe.Pointer)(unsafe.Pointer(&p)) +} +func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} { + return 
*(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: code.Type, + ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)), + })) +} + +func appendBool(_ *encoder.RuntimeContext, b []byte, v bool) []byte { + if v { + return append(b, "true"...) + } + return append(b, "false"...) +} + +func appendNull(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, "null"...) +} + +func appendComma(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, ',') +} + +func appendNullComma(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, "null,"...) +} + +func appendColon(_ *encoder.RuntimeContext, b []byte) []byte { + last := len(b) - 1 + b[last] = ':' + return b +} + +func appendMapKeyValue(_ *encoder.RuntimeContext, _ *encoder.Opcode, b, key, value []byte) []byte { + b = append(b, key...) + b[len(b)-1] = ':' + return append(b, value...) +} + +func appendMapEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + b[len(b)-1] = '}' + b = append(b, ',') + return b +} + +func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) { + return encoder.AppendMarshalJSON(ctx, code, b, v) +} + +func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) { + return encoder.AppendMarshalText(ctx, code, b, v) +} + +func appendArrayHead(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + return append(b, '[') +} + +func appendArrayEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + b[last] = ']' + return append(b, ',') +} + +func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '[', ']', ',') +} + +func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '{', '}', ',') +} + +func appendObjectEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + b[last] = '}' + return append(b, ',') +} + +func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '{') +} + +func appendStructKey(_ *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + return append(b, code.Key...) +} + +func appendStructEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + return append(b, '}', ',') +} + +func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + if b[last] == ',' { + b[last] = '}' + return appendComma(ctx, b) + } + return appendStructEnd(ctx, code, b) +} + +func restoreIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, _ uintptr) {} +func storeIndent(_ uintptr, _ *encoder.Opcode, _ uintptr) {} +func appendMapKeyIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b } +func appendArrayElemIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b } diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/vm.go new file mode 100644 index 00000000..645d20f9 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm/vm.go @@ -0,0 +1,4859 @@ +// Code generated by internal/cmd/generator. DO NOT EDIT! 
+package vm + +import ( + "math" + "reflect" + "sort" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +func Run(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + recursiveLevel := 0 + ptrOffset := uintptr(0) + ctxptr := ctx.Ptr() + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + for { + switch code.Op { + default: + return nil, errUnimplementedOp(code.Op) + case encoder.OpPtr: + p := load(ctxptr, code.Idx) + code = code.Next + store(ctxptr, code.Idx, ptrToPtr(p)) + case encoder.OpIntPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInt: + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpUint: + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpIntString: + b = append(b, '"') + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintString: + b = append(b, '"') + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat32Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat32: + b = appendFloat32(ctx, b, ptrToFloat32(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat64Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat64: + v := ptrToFloat64(load(ctxptr, code.Idx)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStringPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpString: + b = appendString(ctx, b, ptrToString(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBoolPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBool: + b = appendBool(ctx, b, ptrToBool(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBytesPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBytes: + b = appendByteSlice(ctx, b, ptrToBytes(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpNumberPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + 
break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpNumber: + bb, err := appendNumber(ctx, b, ptrToNumber(load(ctxptr, code.Idx))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpInterfacePtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInterface: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + if recursiveLevel > encoder.StartDetectingCyclesAfter { + for _, seen := range ctx.SeenPtr { + if p == seen { + return nil, errUnsupportedValue(code, p) + } + } + } + ctx.SeenPtr = append(ctx.SeenPtr, p) + var ( + typ *runtime.Type + ifacePtr unsafe.Pointer + ) + up := ptrToUnsafePtr(p) + if code.Flags&encoder.NonEmptyInterfaceFlags != 0 { + iface := (*nonEmptyInterface)(up) + ifacePtr = iface.ptr + if iface.itab != nil { + typ = iface.itab.typ + } + } else { + iface := (*emptyInterface)(up) + ifacePtr = iface.ptr + typ = iface.typ + } + if ifacePtr == nil { + isDirectedNil := typ != nil && typ.Kind() == reflect.Struct && !runtime.IfaceIndir(typ) + if !isDirectedNil { + b = appendNullComma(ctx, b) + code = code.Next + break + } + } + ctx.KeepRefs = append(ctx.KeepRefs, up) + ifaceCodeSet, err := encoder.CompileToGetCodeSet(ctx, uintptr(unsafe.Pointer(typ))) + if err != nil { + return nil, err + } + + totalLength := uintptr(code.Length) + 3 + nextTotalLength := uintptr(ifaceCodeSet.CodeLength) + 3 + + var c *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + c = ifaceCodeSet.InterfaceEscapeKeyCode + } else { + c = ifaceCodeSet.InterfaceNoescapeKeyCode + } + curlen := uintptr(len(ctx.Ptrs)) + offsetNum := ptrOffset / uintptrSize + oldOffset := ptrOffset + ptrOffset += totalLength * uintptrSize + oldBaseIndent := ctx.BaseIndent + ctx.BaseIndent += code.Indent + + newLen := offsetNum + totalLength + nextTotalLength + if curlen < newLen { + ctx.Ptrs = append(ctx.Ptrs, make([]uintptr, newLen-curlen)...) 
[Vendored dependency source elided from this excerpt: the remainder of this hunk is unmodified third-party Go code added under vendor/, which appears to be the goccy/go-json encoder VM opcode loop. The elided cases cover the interface-end, MarshalJSON/MarshalText, slice, array, map, and recursion opcodes, followed by the OpStructPtrHead*/OpStructHead* families for int, uint, float32/float64, string, bool, []byte, and json.Number fields, each with their OmitEmpty, String, and Ptr variants. Every case loads a field pointer from the opcode context, appends the encoded JSON bytes (or null for a nil pointer), and advances to the next opcode; the vendored source continues past the end of this excerpt.]
appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadMarshalJSON { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadOmitEmptyMarshalJSON { + p = ptrToPtr(p) + } + } + iface := ptrToInterface(code, p) + if (code.Flags&encoder.NilCheckFlags) != 0 && encoder.IsNilForMarshaler(iface) { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, iface) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalJSONPtr: + p 
:= load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadMarshalText { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadOmitEmptyMarshalText { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructField: + if code.Flags&encoder.IsTaggedKeyFlags != 0 || code.Flags&encoder.AnonymousKeyFlags == 0 { + b = appendStructKey(ctx, code, b) + } + p := load(ctxptr, code.Idx) + uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmpty: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldInt: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyInt: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if 
p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUint: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUint: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = 
appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32String: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, 
errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringString: + p := load(ctxptr, code.Idx) + s := ptrToString(p + uintptr(code.Offset)) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, s))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendComma(ctx, b) 
+ } + code = code.Next + case encoder.OpStructFieldBool: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBool: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBytes: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBytes: + p := load(ctxptr, code.Idx) + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) > 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldNumber: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case 
encoder.OpStructFieldOmitEmptyNumber: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldNumberString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldNumberPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldMarshalJSON: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + break + } + iface := ptrToInterface(code, p) + if (code.Flags&encoder.NilCheckFlags) != 0 && encoder.IsNilForMarshaler(iface) { + code = code.NextField + break + } + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, iface) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructFieldMarshalJSONPtr: + p := load(ctxptr, code.Idx) + b = 
appendStructKey(ctx, code, b) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalJSONPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldMarshalText: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + break + } + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructFieldMarshalTextPtr: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldArray: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyArray: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldArrayPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyArrayPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldSlice: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptySlice: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + slice := ptrToSlice(p) + if slice.Len == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code 
= code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldSlicePtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptySlicePtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldMap: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyMap: + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p == 0 || maplen(ptrToUnsafePtr(p)) == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldMapPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyMapPtr: + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldStruct: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyStruct: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructEnd: + b = appendStructEndSkipLast(ctx, code, b) + code = code.Next + case encoder.OpStructEndInt: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyInt: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p 
== 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUint: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUint: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32String: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = 
code.Next + case encoder.OpStructEndFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendStructEnd(ctx, code, b) + code = code.Next + break + } + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + s := ptrToString(p + uintptr(code.Offset)) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, s))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + 
uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBool: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBool: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 
0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytes: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytes: + p := load(ctxptr, code.Idx) + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) > 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytesPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumber: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + code = code.Next + case encoder.OpStructEndOmitEmptyNumber: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpEnd: + goto END + } + } +END: + return b, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/debug_vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/debug_vm.go new file mode 100644 index 00000000..925f61ed --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/debug_vm.go @@ -0,0 +1,35 @@ +package vm_color + +import ( + "fmt" + + "github.com/goccy/go-json/internal/encoder" +) + +func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + defer func() { + if err := recover(); err != nil { + w := ctx.Option.DebugOut + fmt.Fprintln(w, "=============[DEBUG]===============") + fmt.Fprintln(w, "* [TYPE]") + fmt.Fprintln(w, codeSet.Type) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [ALL OPCODE]") + fmt.Fprintln(w, code.Dump()) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [CONTEXT]") + fmt.Fprintf(w, "%+v\n", ctx) + fmt.Fprintln(w, "===================================") + panic(err) + } + }() + + return Run(ctx, b, codeSet) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/hack.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/hack.go new file mode 100644 index 00000000..12ec56c5 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/hack.go @@ -0,0 +1,9 @@ +package vm_color + +import ( + // HACK: compile order + // `vm`, `vm_indent`, `vm_color`, `vm_color_indent` packages uses a lot of memory to compile, + // so forcibly make dependencies and avoid compiling in concurrent. 
+ // dependency order: vm => vm_indent => vm_color => vm_color_indent + _ "github.com/goccy/go-json/internal/encoder/vm_color_indent" +) diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/util.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/util.go new file mode 100644 index 00000000..33f29aee --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/util.go @@ -0,0 +1,274 @@ +package vm_color + +import ( + "encoding/json" + "fmt" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +const uintptrSize = 4 << (^uintptr(0) >> 63) + +var ( + errUnsupportedValue = encoder.ErrUnsupportedValue + errUnsupportedFloat = encoder.ErrUnsupportedFloat + mapiterinit = encoder.MapIterInit + mapiterkey = encoder.MapIterKey + mapitervalue = encoder.MapIterValue + mapiternext = encoder.MapIterNext + maplen = encoder.MapLen +) + +type emptyInterface struct { + typ *runtime.Type + ptr unsafe.Pointer +} + +type nonEmptyInterface struct { + itab *struct { + ityp *runtime.Type // static interface type + typ *runtime.Type // dynamic concrete type + // unused fields... + } + ptr unsafe.Pointer +} + +func errUnimplementedOp(op encoder.OpType) error { + return fmt.Errorf("encoder: opcode %s has not been implemented", op) +} + +func load(base uintptr, idx uint32) uintptr { + addr := base + uintptr(idx) + return **(**uintptr)(unsafe.Pointer(&addr)) +} + +func store(base uintptr, idx uint32, p uintptr) { + addr := base + uintptr(idx) + **(**uintptr)(unsafe.Pointer(&addr)) = p +} + +func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr { + addr := base + uintptr(idx) + p := **(**uintptr)(unsafe.Pointer(&addr)) + for i := uint8(0); i < ptrNum; i++ { + if p == 0 { + return 0 + } + p = ptrToPtr(p) + } + return p +} + +func ptrToUint64(p uintptr, bitSize uint8) uint64 { + switch bitSize { + case 8: + return (uint64)(**(**uint8)(unsafe.Pointer(&p))) + case 16: + return (uint64)(**(**uint16)(unsafe.Pointer(&p))) + case 32: + return (uint64)(**(**uint32)(unsafe.Pointer(&p))) + case 64: + return **(**uint64)(unsafe.Pointer(&p)) + } + return 0 +} +func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) } +func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) } +func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) } +func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) } +func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) } +func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) } +func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) } +func ptrToPtr(p uintptr) uintptr { + return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p))) +} +func ptrToNPtr(p uintptr, ptrNum uint8) uintptr { + for i := uint8(0); i < ptrNum; i++ { + if p == 0 { + return 0 + } + p = ptrToPtr(p) + } + return p +} + +func ptrToUnsafePtr(p uintptr) unsafe.Pointer { + return *(*unsafe.Pointer)(unsafe.Pointer(&p)) +} +func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} { + return *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: code.Type, + ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)), + })) +} + +func appendInt(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte { + format := ctx.Option.ColorScheme.Int + b = append(b, 
format.Header...) + b = encoder.AppendInt(ctx, b, p, code) + return append(b, format.Footer...) +} + +func appendUint(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte { + format := ctx.Option.ColorScheme.Uint + b = append(b, format.Header...) + b = encoder.AppendUint(ctx, b, p, code) + return append(b, format.Footer...) +} + +func appendFloat32(ctx *encoder.RuntimeContext, b []byte, v float32) []byte { + format := ctx.Option.ColorScheme.Float + b = append(b, format.Header...) + b = encoder.AppendFloat32(ctx, b, v) + return append(b, format.Footer...) +} + +func appendFloat64(ctx *encoder.RuntimeContext, b []byte, v float64) []byte { + format := ctx.Option.ColorScheme.Float + b = append(b, format.Header...) + b = encoder.AppendFloat64(ctx, b, v) + return append(b, format.Footer...) +} + +func appendString(ctx *encoder.RuntimeContext, b []byte, v string) []byte { + format := ctx.Option.ColorScheme.String + b = append(b, format.Header...) + b = encoder.AppendString(ctx, b, v) + return append(b, format.Footer...) +} + +func appendByteSlice(ctx *encoder.RuntimeContext, b []byte, src []byte) []byte { + format := ctx.Option.ColorScheme.Binary + b = append(b, format.Header...) + b = encoder.AppendByteSlice(ctx, b, src) + return append(b, format.Footer...) +} + +func appendNumber(ctx *encoder.RuntimeContext, b []byte, n json.Number) ([]byte, error) { + format := ctx.Option.ColorScheme.Int + b = append(b, format.Header...) + bb, err := encoder.AppendNumber(ctx, b, n) + if err != nil { + return nil, err + } + return append(bb, format.Footer...), nil +} + +func appendBool(ctx *encoder.RuntimeContext, b []byte, v bool) []byte { + format := ctx.Option.ColorScheme.Bool + b = append(b, format.Header...) + if v { + b = append(b, "true"...) + } else { + b = append(b, "false"...) + } + return append(b, format.Footer...) +} + +func appendNull(ctx *encoder.RuntimeContext, b []byte) []byte { + format := ctx.Option.ColorScheme.Null + b = append(b, format.Header...) + b = append(b, "null"...) + return append(b, format.Footer...) +} + +func appendComma(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, ',') +} + +func appendNullComma(ctx *encoder.RuntimeContext, b []byte) []byte { + format := ctx.Option.ColorScheme.Null + b = append(b, format.Header...) + b = append(b, "null"...) + return append(append(b, format.Footer...), ',') +} + +func appendColon(_ *encoder.RuntimeContext, b []byte) []byte { + last := len(b) - 1 + b[last] = ':' + return b +} + +func appendMapKeyValue(_ *encoder.RuntimeContext, _ *encoder.Opcode, b, key, value []byte) []byte { + b = append(b, key[:len(key)-1]...) + b = append(b, ':') + return append(b, value...) +} + +func appendMapEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + b[last] = '}' + b = append(b, ',') + return b +} + +func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) { + return encoder.AppendMarshalJSON(ctx, code, b, v) +} + +func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) { + format := ctx.Option.ColorScheme.String + b = append(b, format.Header...) 
+ bb, err := encoder.AppendMarshalText(ctx, code, b, v) + if err != nil { + return nil, err + } + return append(bb, format.Footer...), nil +} + +func appendArrayHead(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + return append(b, '[') +} + +func appendArrayEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + b[last] = ']' + return append(b, ',') +} + +func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '[', ']', ',') +} + +func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '{', '}', ',') +} + +func appendObjectEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + b[last] = '}' + return append(b, ',') +} + +func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '{') +} + +func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + format := ctx.Option.ColorScheme.ObjectKey + b = append(b, format.Header...) + b = append(b, code.Key[:len(code.Key)-1]...) + b = append(b, format.Footer...) + + return append(b, ':') +} + +func appendStructEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { + return append(b, '}', ',') +} + +func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + if b[last] == ',' { + b[last] = '}' + return appendComma(ctx, b) + } + return appendStructEnd(ctx, code, b) +} + +func restoreIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, _ uintptr) {} +func storeIndent(_ uintptr, _ *encoder.Opcode, _ uintptr) {} +func appendMapKeyIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b } +func appendArrayElemIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b } diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/vm.go new file mode 100644 index 00000000..a63e83e5 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color/vm.go @@ -0,0 +1,4859 @@ +// Code generated by internal/cmd/generator. DO NOT EDIT! 
+package vm_color + +import ( + "math" + "reflect" + "sort" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +func Run(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + recursiveLevel := 0 + ptrOffset := uintptr(0) + ctxptr := ctx.Ptr() + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + for { + switch code.Op { + default: + return nil, errUnimplementedOp(code.Op) + case encoder.OpPtr: + p := load(ctxptr, code.Idx) + code = code.Next + store(ctxptr, code.Idx, ptrToPtr(p)) + case encoder.OpIntPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInt: + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpUint: + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpIntString: + b = append(b, '"') + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintString: + b = append(b, '"') + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat32Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat32: + b = appendFloat32(ctx, b, ptrToFloat32(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat64Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat64: + v := ptrToFloat64(load(ctxptr, code.Idx)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStringPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpString: + b = appendString(ctx, b, ptrToString(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBoolPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBool: + b = appendBool(ctx, b, ptrToBool(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBytesPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBytes: + b = appendByteSlice(ctx, b, ptrToBytes(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpNumberPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = 
code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpNumber: + bb, err := appendNumber(ctx, b, ptrToNumber(load(ctxptr, code.Idx))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpInterfacePtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInterface: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + if recursiveLevel > encoder.StartDetectingCyclesAfter { + for _, seen := range ctx.SeenPtr { + if p == seen { + return nil, errUnsupportedValue(code, p) + } + } + } + ctx.SeenPtr = append(ctx.SeenPtr, p) + var ( + typ *runtime.Type + ifacePtr unsafe.Pointer + ) + up := ptrToUnsafePtr(p) + if code.Flags&encoder.NonEmptyInterfaceFlags != 0 { + iface := (*nonEmptyInterface)(up) + ifacePtr = iface.ptr + if iface.itab != nil { + typ = iface.itab.typ + } + } else { + iface := (*emptyInterface)(up) + ifacePtr = iface.ptr + typ = iface.typ + } + if ifacePtr == nil { + isDirectedNil := typ != nil && typ.Kind() == reflect.Struct && !runtime.IfaceIndir(typ) + if !isDirectedNil { + b = appendNullComma(ctx, b) + code = code.Next + break + } + } + ctx.KeepRefs = append(ctx.KeepRefs, up) + ifaceCodeSet, err := encoder.CompileToGetCodeSet(ctx, uintptr(unsafe.Pointer(typ))) + if err != nil { + return nil, err + } + + totalLength := uintptr(code.Length) + 3 + nextTotalLength := uintptr(ifaceCodeSet.CodeLength) + 3 + + var c *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + c = ifaceCodeSet.InterfaceEscapeKeyCode + } else { + c = ifaceCodeSet.InterfaceNoescapeKeyCode + } + curlen := uintptr(len(ctx.Ptrs)) + offsetNum := ptrOffset / uintptrSize + oldOffset := ptrOffset + ptrOffset += totalLength * uintptrSize + oldBaseIndent := ctx.BaseIndent + ctx.BaseIndent += code.Indent + + newLen := offsetNum + totalLength + nextTotalLength + if curlen < newLen { + ctx.Ptrs = append(ctx.Ptrs, make([]uintptr, newLen-curlen)...) 
+ } + ctxptr = ctx.Ptr() + ptrOffset // assign new ctxptr + + end := ifaceCodeSet.EndCode + store(ctxptr, c.Idx, uintptr(ifacePtr)) + store(ctxptr, end.Idx, oldOffset) + store(ctxptr, end.ElemIdx, uintptr(unsafe.Pointer(code.Next))) + storeIndent(ctxptr, end, uintptr(oldBaseIndent)) + code = c + recursiveLevel++ + case encoder.OpInterfaceEnd: + recursiveLevel-- + + // restore ctxptr + offset := load(ctxptr, code.Idx) + restoreIndent(ctx, code, ctxptr) + ctx.SeenPtr = ctx.SeenPtr[:len(ctx.SeenPtr)-1] + + codePtr := load(ctxptr, code.ElemIdx) + code = (*encoder.Opcode)(ptrToUnsafePtr(codePtr)) + ctxptr = ctx.Ptr() + offset + ptrOffset = offset + case encoder.OpMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToPtr(p)) + fallthrough + case encoder.OpMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + if (code.Flags&encoder.IsNilableTypeFlags) != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p) + } + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToPtr(p)) + fallthrough + case encoder.OpMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + b = append(b, `""`...) + b = appendComma(ctx, b) + code = code.Next + break + } + if (code.Flags&encoder.IsNilableTypeFlags) != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p) + } + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpSlicePtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpSlice: + p := load(ctxptr, code.Idx) + slice := ptrToSlice(p) + if p == 0 || slice.Data == nil { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.ElemIdx, 0) + store(ctxptr, code.Length, uintptr(slice.Len)) + store(ctxptr, code.Idx, uintptr(slice.Data)) + if slice.Len > 0 { + b = appendArrayHead(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, uintptr(slice.Data)) + } else { + b = appendEmptyArray(ctx, b) + code = code.End.Next + } + case encoder.OpSliceElem: + idx := load(ctxptr, code.ElemIdx) + length := load(ctxptr, code.Length) + idx++ + if idx < length { + b = appendArrayElemIndent(ctx, code, b) + store(ctxptr, code.ElemIdx, idx) + data := load(ctxptr, code.Idx) + size := uintptr(code.Size) + code = code.Next + store(ctxptr, code.Idx, data+idx*size) + } else { + b = appendArrayEnd(ctx, code, b) + code = code.End.Next + } + case encoder.OpArrayPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpArray: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + if code.Length > 0 { + b = appendArrayHead(ctx, code, b) + store(ctxptr, code.ElemIdx, 0) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + b = appendEmptyArray(ctx, b) + code = code.End.Next + } + case encoder.OpArrayElem: + idx := load(ctxptr, 
code.ElemIdx) + idx++ + if idx < uintptr(code.Length) { + b = appendArrayElemIndent(ctx, code, b) + store(ctxptr, code.ElemIdx, idx) + p := load(ctxptr, code.Idx) + size := uintptr(code.Size) + code = code.Next + store(ctxptr, code.Idx, p+idx*size) + } else { + b = appendArrayEnd(ctx, code, b) + code = code.End.Next + } + case encoder.OpMapPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpMap: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + uptr := ptrToUnsafePtr(p) + mlen := maplen(uptr) + if mlen <= 0 { + b = appendEmptyObject(ctx, b) + code = code.End.Next + break + } + b = appendStructHead(ctx, b) + unorderedMap := (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 + mapCtx := encoder.NewMapContext(mlen, unorderedMap) + mapiterinit(code.Type, uptr, &mapCtx.Iter) + store(ctxptr, code.Idx, uintptr(unsafe.Pointer(mapCtx))) + ctx.KeepRefs = append(ctx.KeepRefs, unsafe.Pointer(mapCtx)) + if unorderedMap { + b = appendMapKeyIndent(ctx, code.Next, b) + } else { + mapCtx.Start = len(b) + mapCtx.First = len(b) + } + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + case encoder.OpMapKey: + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + idx := mapCtx.Idx + idx++ + if (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 { + if idx < mapCtx.Len { + b = appendMapKeyIndent(ctx, code, b) + mapCtx.Idx = int(idx) + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + } else { + b = appendObjectEnd(ctx, code, b) + encoder.ReleaseMapContext(mapCtx) + code = code.End.Next + } + } else { + mapCtx.Slice.Items[mapCtx.Idx].Value = b[mapCtx.Start:len(b)] + if idx < mapCtx.Len { + mapCtx.Idx = int(idx) + mapCtx.Start = len(b) + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + } else { + code = code.End + } + } + case encoder.OpMapValue: + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + if (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 { + b = appendColon(ctx, b) + } else { + mapCtx.Slice.Items[mapCtx.Idx].Key = b[mapCtx.Start:len(b)] + mapCtx.Start = len(b) + } + value := mapitervalue(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(value)) + mapiternext(&mapCtx.Iter) + code = code.Next + case encoder.OpMapEnd: + // this operation only used by sorted map. + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + sort.Sort(mapCtx.Slice) + buf := mapCtx.Buf + for _, item := range mapCtx.Slice.Items { + buf = appendMapKeyValue(ctx, code, buf, item.Key, item.Value) + } + buf = appendMapEnd(ctx, code, buf) + b = b[:mapCtx.First] + b = append(b, buf...) 
+ mapCtx.Buf = buf + encoder.ReleaseMapContext(mapCtx) + code = code.Next + case encoder.OpRecursivePtr: + p := load(ctxptr, code.Idx) + if p == 0 { + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpRecursive: + ptr := load(ctxptr, code.Idx) + if ptr != 0 { + if recursiveLevel > encoder.StartDetectingCyclesAfter { + for _, seen := range ctx.SeenPtr { + if ptr == seen { + return nil, errUnsupportedValue(code, ptr) + } + } + } + } + ctx.SeenPtr = append(ctx.SeenPtr, ptr) + c := code.Jmp.Code + curlen := uintptr(len(ctx.Ptrs)) + offsetNum := ptrOffset / uintptrSize + oldOffset := ptrOffset + ptrOffset += code.Jmp.CurLen * uintptrSize + oldBaseIndent := ctx.BaseIndent + indentDiffFromTop := c.Indent - 1 + ctx.BaseIndent += code.Indent - indentDiffFromTop + + newLen := offsetNum + code.Jmp.CurLen + code.Jmp.NextLen + if curlen < newLen { + ctx.Ptrs = append(ctx.Ptrs, make([]uintptr, newLen-curlen)...) + } + ctxptr = ctx.Ptr() + ptrOffset // assign new ctxptr + + store(ctxptr, c.Idx, ptr) + store(ctxptr, c.End.Next.Idx, oldOffset) + store(ctxptr, c.End.Next.ElemIdx, uintptr(unsafe.Pointer(code.Next))) + storeIndent(ctxptr, c.End.Next, uintptr(oldBaseIndent)) + code = c + recursiveLevel++ + case encoder.OpRecursiveEnd: + recursiveLevel-- + + // restore ctxptr + restoreIndent(ctx, code, ctxptr) + offset := load(ctxptr, code.Idx) + ctx.SeenPtr = ctx.SeenPtr[:len(ctx.SeenPtr)-1] + + codePtr := load(ctxptr, code.ElemIdx) + code = (*encoder.Opcode)(ptrToUnsafePtr(codePtr)) + ctxptr = ctx.Ptr() + offset + ptrOffset = offset + case encoder.OpStructPtrHead: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHead: + p := load(ctxptr, code.Idx) + if p == 0 && ((code.Flags&encoder.IndirectFlags) != 0 || code.Next.Op == encoder.OpStructEnd) { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if len(code.Key) > 0 { + if (code.Flags&encoder.IsTaggedKeyFlags) != 0 || code.Flags&encoder.AnonymousKeyFlags == 0 { + b = appendStructKey(ctx, code, b) + } + } + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructPtrHeadOmitEmpty: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmpty: + p := load(ctxptr, code.Idx) + if p == 0 && ((code.Flags&encoder.IndirectFlags) != 0 || code.Next.Op == encoder.OpStructEnd) { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if p == 0 || (ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0) { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadInt: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } 
+ code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadInt: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyInt: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyInt: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadIntString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadIntString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyIntString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + u64 := ptrToUint64(p, code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { 
+ if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadUint: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadUint: + p := load(ctxptr, code.Idx) + if p == 0 { + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUint: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyUint: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadUintString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadUintString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyUintString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags 
== 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat32: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat32: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat32(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat32String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat32String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat32(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + 
b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat64: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat64: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, 
b) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat64String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat64String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if 
(code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, 
[Vendored dependency source, pulled in verbatim by `go mod vendor`: this portion of the hunk adds the struct-head and struct-field opcode cases of what appears to be the github.com/goccy/go-json encoder VM (String, Bool, Bytes, Number, Int, Uint, Float, Array, Slice, Map, MarshalJSON, and MarshalText handlers, each in plain, Ptr, string-tag, and OmitEmpty variants, followed by the beginning of the corresponding OpStructEnd cases); the original line breaks of the diff were lost in extraction.]
== 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUint: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUint: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32String: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = 
code.Next + case encoder.OpStructEndFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendStructEnd(ctx, code, b) + code = code.Next + break + } + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + s := ptrToString(p + uintptr(code.Offset)) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, s))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + 
uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBool: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBool: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 
0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytes: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytes: + p := load(ctxptr, code.Idx) + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) > 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytesPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumber: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + code = code.Next + case encoder.OpStructEndOmitEmptyNumber: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpEnd: + goto END + } + } +END: + return b, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/debug_vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/debug_vm.go new file mode 100644 index 00000000..dd4cd489 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/debug_vm.go @@ -0,0 +1,35 @@ +package vm_color_indent + +import ( + "fmt" + + "github.com/goccy/go-json/internal/encoder" +) + +func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + defer func() { + if err := recover(); err != nil { + w := ctx.Option.DebugOut + fmt.Fprintln(w, "=============[DEBUG]===============") + fmt.Fprintln(w, "* [TYPE]") + fmt.Fprintln(w, codeSet.Type) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [ALL OPCODE]") + fmt.Fprintln(w, code.Dump()) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [CONTEXT]") + fmt.Fprintf(w, "%+v\n", ctx) + fmt.Fprintln(w, "===================================") + panic(err) + } + }() + + return Run(ctx, b, codeSet) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/util.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/util.go new file mode 100644 index 00000000..60e4a8ed --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/util.go @@ -0,0 +1,296 @@ +package vm_color_indent + +import ( + "encoding/json" + "fmt" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +const uintptrSize = 4 << (^uintptr(0) >> 63) + +var ( + appendIndent = encoder.AppendIndent + appendStructEnd = encoder.AppendStructEndIndent + errUnsupportedValue = encoder.ErrUnsupportedValue + errUnsupportedFloat = encoder.ErrUnsupportedFloat + mapiterinit = encoder.MapIterInit + mapiterkey = encoder.MapIterKey + mapitervalue = encoder.MapIterValue + mapiternext = encoder.MapIterNext + maplen = encoder.MapLen +) + +type emptyInterface struct { + typ *runtime.Type + ptr unsafe.Pointer +} + +type nonEmptyInterface 
struct {
+	itab *struct {
+		ityp *runtime.Type // static interface type
+		typ *runtime.Type // dynamic concrete type
+		// unused fields...
+	}
+	ptr unsafe.Pointer
+}
+
+func errUnimplementedOp(op encoder.OpType) error {
+	return fmt.Errorf("encoder (indent): opcode %s has not been implemented", op)
+}
+
+func load(base uintptr, idx uint32) uintptr {
+	addr := base + uintptr(idx)
+	return **(**uintptr)(unsafe.Pointer(&addr))
+}
+
+func store(base uintptr, idx uint32, p uintptr) {
+	addr := base + uintptr(idx)
+	**(**uintptr)(unsafe.Pointer(&addr)) = p
+}
+
+func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
+	addr := base + uintptr(idx)
+	p := **(**uintptr)(unsafe.Pointer(&addr))
+	for i := uint8(0); i < ptrNum; i++ {
+		if p == 0 {
+			return 0
+		}
+		p = ptrToPtr(p)
+	}
+	return p
+}
+
+func ptrToUint64(p uintptr, bitSize uint8) uint64 {
+	switch bitSize {
+	case 8:
+		return (uint64)(**(**uint8)(unsafe.Pointer(&p)))
+	case 16:
+		return (uint64)(**(**uint16)(unsafe.Pointer(&p)))
+	case 32:
+		return (uint64)(**(**uint32)(unsafe.Pointer(&p)))
+	case 64:
+		return **(**uint64)(unsafe.Pointer(&p))
+	}
+	return 0
+}
+
+func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
+func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
+func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
+func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
+func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
+func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
+func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
+func ptrToPtr(p uintptr) uintptr {
+	return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
+}
+func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
+	for i := uint8(0); i < ptrNum; i++ {
+		if p == 0 {
+			return 0
+		}
+		p = ptrToPtr(p)
+	}
+	return p
+}
+
+func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
+	return *(*unsafe.Pointer)(unsafe.Pointer(&p))
+}
+func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
+	return *(*interface{})(unsafe.Pointer(&emptyInterface{
+		typ: code.Type,
+		ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
+	}))
+}
+
+func appendInt(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte {
+	format := ctx.Option.ColorScheme.Int
+	b = append(b, format.Header...)
+	b = encoder.AppendInt(ctx, b, p, code)
+	return append(b, format.Footer...)
+}
+
+func appendUint(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte {
+	format := ctx.Option.ColorScheme.Uint
+	b = append(b, format.Header...)
+	b = encoder.AppendUint(ctx, b, p, code)
+	return append(b, format.Footer...)
+}
+
+func appendFloat32(ctx *encoder.RuntimeContext, b []byte, v float32) []byte {
+	format := ctx.Option.ColorScheme.Float
+	b = append(b, format.Header...)
+	b = encoder.AppendFloat32(ctx, b, v)
+	return append(b, format.Footer...)
+}
+
+func appendFloat64(ctx *encoder.RuntimeContext, b []byte, v float64) []byte {
+	format := ctx.Option.ColorScheme.Float
+	b = append(b, format.Header...)
+	b = encoder.AppendFloat64(ctx, b, v)
+	return append(b, format.Footer...)
+}
+
+func appendString(ctx *encoder.RuntimeContext, b []byte, v string) []byte {
+	format := ctx.Option.ColorScheme.String
+	b = append(b, format.Header...)
+	b = encoder.AppendString(ctx, b, v)
+	return append(b, format.Footer...)
+}
+
+func appendByteSlice(ctx *encoder.RuntimeContext, b []byte, src []byte) []byte {
+	format := ctx.Option.ColorScheme.Binary
+	b = append(b, format.Header...)
+	b = encoder.AppendByteSlice(ctx, b, src)
+	return append(b, format.Footer...)
+}
+
+func appendNumber(ctx *encoder.RuntimeContext, b []byte, n json.Number) ([]byte, error) {
+	format := ctx.Option.ColorScheme.Int
+	b = append(b, format.Header...)
+	bb, err := encoder.AppendNumber(ctx, b, n)
+	if err != nil {
+		return nil, err
+	}
+	return append(bb, format.Footer...), nil
+}
+
+func appendBool(ctx *encoder.RuntimeContext, b []byte, v bool) []byte {
+	format := ctx.Option.ColorScheme.Bool
+	b = append(b, format.Header...)
+	if v {
+		b = append(b, "true"...)
+	} else {
+		b = append(b, "false"...)
+	}
+	return append(b, format.Footer...)
+}
+
+func appendNull(ctx *encoder.RuntimeContext, b []byte) []byte {
+	format := ctx.Option.ColorScheme.Null
+	b = append(b, format.Header...)
+	b = append(b, "null"...)
+	return append(b, format.Footer...)
+}
+
+func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
+	return append(b, ',', '\n')
+}
+
+func appendNullComma(ctx *encoder.RuntimeContext, b []byte) []byte {
+	format := ctx.Option.ColorScheme.Null
+	b = append(b, format.Header...)
+	b = append(b, "null"...)
+	return append(append(b, format.Footer...), ',', '\n')
+}
+
+func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
+	return append(b, ':', ' ')
+}
+
+func appendMapKeyValue(ctx *encoder.RuntimeContext, code *encoder.Opcode, b, key, value []byte) []byte {
+	b = appendIndent(ctx, b, code.Indent+1)
+	b = append(b, key...)
+	b[len(b)-2] = ':'
+	b[len(b)-1] = ' '
+	return append(b, value...)
+}
+
+func appendMapEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	b = b[:len(b)-2]
+	b = append(b, '\n')
+	b = appendIndent(ctx, b, code.Indent)
+	return append(b, '}', ',', '\n')
+}
+
+func appendArrayHead(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	b = append(b, '[', '\n')
+	return appendIndent(ctx, b, code.Indent+1)
+}
+
+func appendArrayEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	b = b[:len(b)-2]
+	b = append(b, '\n')
+	b = appendIndent(ctx, b, code.Indent)
+	return append(b, ']', ',', '\n')
+}
+
+func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
+	return append(b, '[', ']', ',', '\n')
+}
+
+func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
+	return append(b, '{', '}', ',', '\n')
+}
+
+func appendObjectEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	last := len(b) - 1
+	b[last] = '\n'
+	b = appendIndent(ctx, b, code.Indent-1)
+	return append(b, '}', ',', '\n')
+}
+
+func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
+	return encoder.AppendMarshalJSONIndent(ctx, code, b, v)
+}
+
+func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
+	format := ctx.Option.ColorScheme.String
+	b = append(b, format.Header...)
+	bb, err := encoder.AppendMarshalTextIndent(ctx, code, b, v)
+	if err != nil {
+		return nil, err
+	}
+	return append(bb, format.Footer...), nil
+}
+
+func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
+	return append(b, '{', '\n')
+}
+
+func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	b = appendIndent(ctx, b, code.Indent)
+
+	format := ctx.Option.ColorScheme.ObjectKey
+	b = append(b, format.Header...)
+	b = append(b, code.Key[:len(code.Key)-1]...)
+	b = append(b, format.Footer...)
+
+	return append(b, ':', ' ')
+}
+
+func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	last := len(b) - 1
+	if b[last-1] == '{' {
+		b[last] = '}'
+	} else {
+		if b[last] == '\n' {
+			// to remove ',' and '\n' characters
+			b = b[:len(b)-2]
+		}
+		b = append(b, '\n')
+		b = appendIndent(ctx, b, code.Indent-1)
+		b = append(b, '}')
+	}
+	return appendComma(ctx, b)
+}
+
+func restoreIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, ctxptr uintptr) {
+	ctx.BaseIndent = uint32(load(ctxptr, code.Length))
+}
+
+func storeIndent(ctxptr uintptr, code *encoder.Opcode, indent uintptr) {
+	store(ctxptr, code.Length, indent)
+}
+
+func appendArrayElemIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	return appendIndent(ctx, b, code.Indent+1)
+}
+
+func appendMapKeyIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
+	return appendIndent(ctx, b, code.Indent)
+}
diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/vm.go
new file mode 100644
index 00000000..3b4e22e5
--- /dev/null
+++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/vm.go
@@ -0,0 +1,4859 @@
+// Code generated by internal/cmd/generator. DO NOT EDIT!
+package vm_color_indent + +import ( + "math" + "reflect" + "sort" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +func Run(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + recursiveLevel := 0 + ptrOffset := uintptr(0) + ctxptr := ctx.Ptr() + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + for { + switch code.Op { + default: + return nil, errUnimplementedOp(code.Op) + case encoder.OpPtr: + p := load(ctxptr, code.Idx) + code = code.Next + store(ctxptr, code.Idx, ptrToPtr(p)) + case encoder.OpIntPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInt: + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpUint: + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpIntString: + b = append(b, '"') + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintString: + b = append(b, '"') + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat32Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat32: + b = appendFloat32(ctx, b, ptrToFloat32(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat64Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat64: + v := ptrToFloat64(load(ctxptr, code.Idx)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStringPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpString: + b = appendString(ctx, b, ptrToString(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBoolPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBool: + b = appendBool(ctx, b, ptrToBool(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBytesPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBytes: + b = appendByteSlice(ctx, b, ptrToBytes(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpNumberPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = 
code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpNumber: + bb, err := appendNumber(ctx, b, ptrToNumber(load(ctxptr, code.Idx))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpInterfacePtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInterface: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + if recursiveLevel > encoder.StartDetectingCyclesAfter { + for _, seen := range ctx.SeenPtr { + if p == seen { + return nil, errUnsupportedValue(code, p) + } + } + } + ctx.SeenPtr = append(ctx.SeenPtr, p) + var ( + typ *runtime.Type + ifacePtr unsafe.Pointer + ) + up := ptrToUnsafePtr(p) + if code.Flags&encoder.NonEmptyInterfaceFlags != 0 { + iface := (*nonEmptyInterface)(up) + ifacePtr = iface.ptr + if iface.itab != nil { + typ = iface.itab.typ + } + } else { + iface := (*emptyInterface)(up) + ifacePtr = iface.ptr + typ = iface.typ + } + if ifacePtr == nil { + isDirectedNil := typ != nil && typ.Kind() == reflect.Struct && !runtime.IfaceIndir(typ) + if !isDirectedNil { + b = appendNullComma(ctx, b) + code = code.Next + break + } + } + ctx.KeepRefs = append(ctx.KeepRefs, up) + ifaceCodeSet, err := encoder.CompileToGetCodeSet(ctx, uintptr(unsafe.Pointer(typ))) + if err != nil { + return nil, err + } + + totalLength := uintptr(code.Length) + 3 + nextTotalLength := uintptr(ifaceCodeSet.CodeLength) + 3 + + var c *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + c = ifaceCodeSet.InterfaceEscapeKeyCode + } else { + c = ifaceCodeSet.InterfaceNoescapeKeyCode + } + curlen := uintptr(len(ctx.Ptrs)) + offsetNum := ptrOffset / uintptrSize + oldOffset := ptrOffset + ptrOffset += totalLength * uintptrSize + oldBaseIndent := ctx.BaseIndent + ctx.BaseIndent += code.Indent + + newLen := offsetNum + totalLength + nextTotalLength + if curlen < newLen { + ctx.Ptrs = append(ctx.Ptrs, make([]uintptr, newLen-curlen)...) 
+ } + ctxptr = ctx.Ptr() + ptrOffset // assign new ctxptr + + end := ifaceCodeSet.EndCode + store(ctxptr, c.Idx, uintptr(ifacePtr)) + store(ctxptr, end.Idx, oldOffset) + store(ctxptr, end.ElemIdx, uintptr(unsafe.Pointer(code.Next))) + storeIndent(ctxptr, end, uintptr(oldBaseIndent)) + code = c + recursiveLevel++ + case encoder.OpInterfaceEnd: + recursiveLevel-- + + // restore ctxptr + offset := load(ctxptr, code.Idx) + restoreIndent(ctx, code, ctxptr) + ctx.SeenPtr = ctx.SeenPtr[:len(ctx.SeenPtr)-1] + + codePtr := load(ctxptr, code.ElemIdx) + code = (*encoder.Opcode)(ptrToUnsafePtr(codePtr)) + ctxptr = ctx.Ptr() + offset + ptrOffset = offset + case encoder.OpMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToPtr(p)) + fallthrough + case encoder.OpMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + if (code.Flags&encoder.IsNilableTypeFlags) != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p) + } + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToPtr(p)) + fallthrough + case encoder.OpMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + b = append(b, `""`...) + b = appendComma(ctx, b) + code = code.Next + break + } + if (code.Flags&encoder.IsNilableTypeFlags) != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p) + } + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpSlicePtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpSlice: + p := load(ctxptr, code.Idx) + slice := ptrToSlice(p) + if p == 0 || slice.Data == nil { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.ElemIdx, 0) + store(ctxptr, code.Length, uintptr(slice.Len)) + store(ctxptr, code.Idx, uintptr(slice.Data)) + if slice.Len > 0 { + b = appendArrayHead(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, uintptr(slice.Data)) + } else { + b = appendEmptyArray(ctx, b) + code = code.End.Next + } + case encoder.OpSliceElem: + idx := load(ctxptr, code.ElemIdx) + length := load(ctxptr, code.Length) + idx++ + if idx < length { + b = appendArrayElemIndent(ctx, code, b) + store(ctxptr, code.ElemIdx, idx) + data := load(ctxptr, code.Idx) + size := uintptr(code.Size) + code = code.Next + store(ctxptr, code.Idx, data+idx*size) + } else { + b = appendArrayEnd(ctx, code, b) + code = code.End.Next + } + case encoder.OpArrayPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpArray: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + if code.Length > 0 { + b = appendArrayHead(ctx, code, b) + store(ctxptr, code.ElemIdx, 0) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + b = appendEmptyArray(ctx, b) + code = code.End.Next + } + case encoder.OpArrayElem: + idx := load(ctxptr, 
code.ElemIdx) + idx++ + if idx < uintptr(code.Length) { + b = appendArrayElemIndent(ctx, code, b) + store(ctxptr, code.ElemIdx, idx) + p := load(ctxptr, code.Idx) + size := uintptr(code.Size) + code = code.Next + store(ctxptr, code.Idx, p+idx*size) + } else { + b = appendArrayEnd(ctx, code, b) + code = code.End.Next + } + case encoder.OpMapPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpMap: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + uptr := ptrToUnsafePtr(p) + mlen := maplen(uptr) + if mlen <= 0 { + b = appendEmptyObject(ctx, b) + code = code.End.Next + break + } + b = appendStructHead(ctx, b) + unorderedMap := (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 + mapCtx := encoder.NewMapContext(mlen, unorderedMap) + mapiterinit(code.Type, uptr, &mapCtx.Iter) + store(ctxptr, code.Idx, uintptr(unsafe.Pointer(mapCtx))) + ctx.KeepRefs = append(ctx.KeepRefs, unsafe.Pointer(mapCtx)) + if unorderedMap { + b = appendMapKeyIndent(ctx, code.Next, b) + } else { + mapCtx.Start = len(b) + mapCtx.First = len(b) + } + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + case encoder.OpMapKey: + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + idx := mapCtx.Idx + idx++ + if (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 { + if idx < mapCtx.Len { + b = appendMapKeyIndent(ctx, code, b) + mapCtx.Idx = int(idx) + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + } else { + b = appendObjectEnd(ctx, code, b) + encoder.ReleaseMapContext(mapCtx) + code = code.End.Next + } + } else { + mapCtx.Slice.Items[mapCtx.Idx].Value = b[mapCtx.Start:len(b)] + if idx < mapCtx.Len { + mapCtx.Idx = int(idx) + mapCtx.Start = len(b) + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + } else { + code = code.End + } + } + case encoder.OpMapValue: + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + if (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 { + b = appendColon(ctx, b) + } else { + mapCtx.Slice.Items[mapCtx.Idx].Key = b[mapCtx.Start:len(b)] + mapCtx.Start = len(b) + } + value := mapitervalue(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(value)) + mapiternext(&mapCtx.Iter) + code = code.Next + case encoder.OpMapEnd: + // this operation only used by sorted map. + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + sort.Sort(mapCtx.Slice) + buf := mapCtx.Buf + for _, item := range mapCtx.Slice.Items { + buf = appendMapKeyValue(ctx, code, buf, item.Key, item.Value) + } + buf = appendMapEnd(ctx, code, buf) + b = b[:mapCtx.First] + b = append(b, buf...) 
+ mapCtx.Buf = buf + encoder.ReleaseMapContext(mapCtx) + code = code.Next + case encoder.OpRecursivePtr: + p := load(ctxptr, code.Idx) + if p == 0 { + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpRecursive: + ptr := load(ctxptr, code.Idx) + if ptr != 0 { + if recursiveLevel > encoder.StartDetectingCyclesAfter { + for _, seen := range ctx.SeenPtr { + if ptr == seen { + return nil, errUnsupportedValue(code, ptr) + } + } + } + } + ctx.SeenPtr = append(ctx.SeenPtr, ptr) + c := code.Jmp.Code + curlen := uintptr(len(ctx.Ptrs)) + offsetNum := ptrOffset / uintptrSize + oldOffset := ptrOffset + ptrOffset += code.Jmp.CurLen * uintptrSize + oldBaseIndent := ctx.BaseIndent + indentDiffFromTop := c.Indent - 1 + ctx.BaseIndent += code.Indent - indentDiffFromTop + + newLen := offsetNum + code.Jmp.CurLen + code.Jmp.NextLen + if curlen < newLen { + ctx.Ptrs = append(ctx.Ptrs, make([]uintptr, newLen-curlen)...) + } + ctxptr = ctx.Ptr() + ptrOffset // assign new ctxptr + + store(ctxptr, c.Idx, ptr) + store(ctxptr, c.End.Next.Idx, oldOffset) + store(ctxptr, c.End.Next.ElemIdx, uintptr(unsafe.Pointer(code.Next))) + storeIndent(ctxptr, c.End.Next, uintptr(oldBaseIndent)) + code = c + recursiveLevel++ + case encoder.OpRecursiveEnd: + recursiveLevel-- + + // restore ctxptr + restoreIndent(ctx, code, ctxptr) + offset := load(ctxptr, code.Idx) + ctx.SeenPtr = ctx.SeenPtr[:len(ctx.SeenPtr)-1] + + codePtr := load(ctxptr, code.ElemIdx) + code = (*encoder.Opcode)(ptrToUnsafePtr(codePtr)) + ctxptr = ctx.Ptr() + offset + ptrOffset = offset + case encoder.OpStructPtrHead: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHead: + p := load(ctxptr, code.Idx) + if p == 0 && ((code.Flags&encoder.IndirectFlags) != 0 || code.Next.Op == encoder.OpStructEnd) { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if len(code.Key) > 0 { + if (code.Flags&encoder.IsTaggedKeyFlags) != 0 || code.Flags&encoder.AnonymousKeyFlags == 0 { + b = appendStructKey(ctx, code, b) + } + } + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructPtrHeadOmitEmpty: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmpty: + p := load(ctxptr, code.Idx) + if p == 0 && ((code.Flags&encoder.IndirectFlags) != 0 || code.Next.Op == encoder.OpStructEnd) { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if p == 0 || (ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0) { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadInt: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } 
+ code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadInt: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyInt: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyInt: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadIntString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadIntString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyIntString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + u64 := ptrToUint64(p, code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { 
+ if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadUint: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadUint: + p := load(ctxptr, code.Idx) + if p == 0 { + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUint: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyUint: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadUintString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadUintString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyUintString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags 
== 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat32: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat32: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat32(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat32String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat32String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat32(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + 
b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat64: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat64: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, 
b) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat64String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat64String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if 
(code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, 
b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNull(ctx, b) + b = appendComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToString(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadStringString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadStringString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p+uintptr(code.Offset))))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyStringString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyStringString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToString(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + 
code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadBool: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadBool: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags 
== 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBool: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyBool: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } else { + code = code.NextField + } + case encoder.OpStructPtrHeadBoolString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadBoolString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBoolString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } else { + code = code.NextField + } + case encoder.OpStructPtrHeadBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = 
appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadBytes: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadBytes: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code 
= code.Next + case encoder.OpStructPtrHeadOmitEmptyBytes: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyBytes: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadNumber: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadNumber: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumber: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyNumber: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToNumber(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + } + case encoder.OpStructPtrHeadNumberString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadNumberString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumberString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToNumber(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 { 
+ if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructPtrHeadNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadArray, encoder.OpStructPtrHeadSlice: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadArray, encoder.OpStructHeadSlice: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case 
encoder.OpStructPtrHeadOmitEmptyArray: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyArray: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructPtrHeadOmitEmptySlice: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptySlice: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + slice := ptrToSlice(p) + if slice.Len == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadArrayPtr, encoder.OpStructPtrHeadSlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadArrayPtr, encoder.OpStructHeadSlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNullComma(ctx, b) + code = code.NextField + } else { + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadOmitEmptyArrayPtr, encoder.OpStructPtrHeadOmitEmptySlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyArrayPtr, encoder.OpStructHeadOmitEmptySlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadMap: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = 
appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMap: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if p != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p + uintptr(code.Offset)) + } + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructPtrHeadOmitEmptyMap: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMap: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p + uintptr(code.Offset)) + } + if maplen(ptrToUnsafePtr(p)) == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.NextField + break + } + p = ptrToPtr(p + uintptr(code.Offset)) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.NextField + } else { + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadOmitEmptyMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + break + } + p = ptrToPtr(p + uintptr(code.Offset)) + if p == 0 { + code = code.NextField + } else { + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = 
appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadMarshalJSON { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadOmitEmptyMarshalJSON { + p = ptrToPtr(p) + } + } + iface := ptrToInterface(code, p) + if (code.Flags&encoder.NilCheckFlags) != 0 && encoder.IsNilForMarshaler(iface) { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, iface) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalJSONPtr: + p 
:= load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadMarshalText { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadOmitEmptyMarshalText { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructField: + if code.Flags&encoder.IsTaggedKeyFlags != 0 || code.Flags&encoder.AnonymousKeyFlags == 0 { + b = appendStructKey(ctx, code, b) + } + p := load(ctxptr, code.Idx) + uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmpty: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldInt: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyInt: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if 
p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUint: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUint: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = 
appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32String: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, 
errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringString: + p := load(ctxptr, code.Idx) + s := ptrToString(p + uintptr(code.Offset)) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, s))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendComma(ctx, b) 
+ } + code = code.Next + case encoder.OpStructFieldBool: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBool: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBytes: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBytes: + p := load(ctxptr, code.Idx) + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) > 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldNumber: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case 
encoder.OpStructFieldOmitEmptyNumber: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldNumberString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldNumberPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldMarshalJSON: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + break + } + iface := ptrToInterface(code, p) + if (code.Flags&encoder.NilCheckFlags) != 0 && encoder.IsNilForMarshaler(iface) { + code = code.NextField + break + } + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, iface) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructFieldMarshalJSONPtr: + p := load(ctxptr, code.Idx) + b = 
appendStructKey(ctx, code, b) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalJSONPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldMarshalText: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + break + } + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructFieldMarshalTextPtr: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldArray: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyArray: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldArrayPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyArrayPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldSlice: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptySlice: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + slice := ptrToSlice(p) + if slice.Len == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code 
= code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldSlicePtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptySlicePtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldMap: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyMap: + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p == 0 || maplen(ptrToUnsafePtr(p)) == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldMapPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyMapPtr: + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldStruct: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyStruct: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructEnd: + b = appendStructEndSkipLast(ctx, code, b) + code = code.Next + case encoder.OpStructEndInt: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyInt: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p 
== 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUint: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUint: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32String: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = 
code.Next + case encoder.OpStructEndFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendStructEnd(ctx, code, b) + code = code.Next + break + } + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + s := ptrToString(p + uintptr(code.Offset)) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, s))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + 
uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBool: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBool: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 
0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytes: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytes: + p := load(ctxptr, code.Idx) + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) > 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytesPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumber: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + code = code.Next + case encoder.OpStructEndOmitEmptyNumber: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpEnd: + goto END + } + } +END: + return b, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/debug_vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/debug_vm.go new file mode 100644 index 00000000..99395388 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/debug_vm.go @@ -0,0 +1,35 @@ +package vm_indent + +import ( + "fmt" + + "github.com/goccy/go-json/internal/encoder" +) + +func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + defer func() { + if err := recover(); err != nil { + w := ctx.Option.DebugOut + fmt.Fprintln(w, "=============[DEBUG]===============") + fmt.Fprintln(w, "* [TYPE]") + fmt.Fprintln(w, codeSet.Type) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [ALL OPCODE]") + fmt.Fprintln(w, code.Dump()) + fmt.Fprintf(w, "\n") + fmt.Fprintln(w, "* [CONTEXT]") + fmt.Fprintf(w, "%+v\n", ctx) + fmt.Fprintln(w, "===================================") + panic(err) + } + }() + + return Run(ctx, b, codeSet) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/hack.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/hack.go new file mode 100644 index 00000000..9e245bfe --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/hack.go @@ -0,0 +1,9 @@ +package vm_indent + +import ( + // HACK: compile order + // `vm`, `vm_indent`, `vm_color`, `vm_color_indent` packages uses a lot of memory to compile, + // so forcibly make dependencies and avoid compiling in concurrent. 
+ // dependency order: vm => vm_indent => vm_color => vm_color_indent + _ "github.com/goccy/go-json/internal/encoder/vm_color" +) diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/util.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/util.go new file mode 100644 index 00000000..fca8f185 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/util.go @@ -0,0 +1,229 @@ +package vm_indent + +import ( + "encoding/json" + "fmt" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +const uintptrSize = 4 << (^uintptr(0) >> 63) + +var ( + appendInt = encoder.AppendInt + appendUint = encoder.AppendUint + appendFloat32 = encoder.AppendFloat32 + appendFloat64 = encoder.AppendFloat64 + appendString = encoder.AppendString + appendByteSlice = encoder.AppendByteSlice + appendNumber = encoder.AppendNumber + appendStructEnd = encoder.AppendStructEndIndent + appendIndent = encoder.AppendIndent + errUnsupportedValue = encoder.ErrUnsupportedValue + errUnsupportedFloat = encoder.ErrUnsupportedFloat + mapiterinit = encoder.MapIterInit + mapiterkey = encoder.MapIterKey + mapitervalue = encoder.MapIterValue + mapiternext = encoder.MapIterNext + maplen = encoder.MapLen +) + +type emptyInterface struct { + typ *runtime.Type + ptr unsafe.Pointer +} + +type nonEmptyInterface struct { + itab *struct { + ityp *runtime.Type // static interface type + typ *runtime.Type // dynamic concrete type + // unused fields... + } + ptr unsafe.Pointer +} + +func errUnimplementedOp(op encoder.OpType) error { + return fmt.Errorf("encoder (indent): opcode %s has not been implemented", op) +} + +func load(base uintptr, idx uint32) uintptr { + addr := base + uintptr(idx) + return **(**uintptr)(unsafe.Pointer(&addr)) +} + +func store(base uintptr, idx uint32, p uintptr) { + addr := base + uintptr(idx) + **(**uintptr)(unsafe.Pointer(&addr)) = p +} + +func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr { + addr := base + uintptr(idx) + p := **(**uintptr)(unsafe.Pointer(&addr)) + for i := uint8(0); i < ptrNum; i++ { + if p == 0 { + return 0 + } + p = ptrToPtr(p) + } + return p +} + +func ptrToUint64(p uintptr, bitSize uint8) uint64 { + switch bitSize { + case 8: + return (uint64)(**(**uint8)(unsafe.Pointer(&p))) + case 16: + return (uint64)(**(**uint16)(unsafe.Pointer(&p))) + case 32: + return (uint64)(**(**uint32)(unsafe.Pointer(&p))) + case 64: + return **(**uint64)(unsafe.Pointer(&p)) + } + return 0 +} +func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) } +func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) } +func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) } +func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) } +func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) } +func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) } +func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) } +func ptrToPtr(p uintptr) uintptr { + return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p))) +} +func ptrToNPtr(p uintptr, ptrNum uint8) uintptr { + for i := uint8(0); i < ptrNum; i++ { + if p == 0 { + return 0 + } + p = ptrToPtr(p) + } + return p +} + +func ptrToUnsafePtr(p uintptr) unsafe.Pointer { + return 
*(*unsafe.Pointer)(unsafe.Pointer(&p)) +} +func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} { + return *(*interface{})(unsafe.Pointer(&emptyInterface{ + typ: code.Type, + ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)), + })) +} + +func appendBool(_ *encoder.RuntimeContext, b []byte, v bool) []byte { + if v { + return append(b, "true"...) + } + return append(b, "false"...) +} + +func appendNull(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, "null"...) +} + +func appendComma(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, ',', '\n') +} + +func appendNullComma(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, "null,\n"...) +} + +func appendColon(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, ':', ' ') +} + +func appendMapKeyValue(ctx *encoder.RuntimeContext, code *encoder.Opcode, b, key, value []byte) []byte { + b = appendIndent(ctx, b, code.Indent+1) + b = append(b, key...) + b[len(b)-2] = ':' + b[len(b)-1] = ' ' + return append(b, value...) +} + +func appendMapEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + b = b[:len(b)-2] + b = append(b, '\n') + b = appendIndent(ctx, b, code.Indent) + return append(b, '}', ',', '\n') +} + +func appendArrayHead(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + b = append(b, '[', '\n') + return appendIndent(ctx, b, code.Indent+1) +} + +func appendArrayEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + b = b[:len(b)-2] + b = append(b, '\n') + b = appendIndent(ctx, b, code.Indent) + return append(b, ']', ',', '\n') +} + +func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '[', ']', ',', '\n') +} + +func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '{', '}', ',', '\n') +} + +func appendObjectEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + b[last] = '\n' + b = appendIndent(ctx, b, code.Indent-1) + return append(b, '}', ',', '\n') +} + +func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) { + return encoder.AppendMarshalJSONIndent(ctx, code, b, v) +} + +func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) { + return encoder.AppendMarshalTextIndent(ctx, code, b, v) +} + +func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte { + return append(b, '{', '\n') +} + +func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + b = appendIndent(ctx, b, code.Indent) + b = append(b, code.Key...) 
+ return append(b, ' ') +} + +func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + last := len(b) - 1 + if b[last-1] == '{' { + b[last] = '}' + } else { + if b[last] == '\n' { + // to remove ',' and '\n' characters + b = b[:len(b)-2] + } + b = append(b, '\n') + b = appendIndent(ctx, b, code.Indent-1) + b = append(b, '}') + } + return appendComma(ctx, b) +} + +func restoreIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, ctxptr uintptr) { + ctx.BaseIndent = uint32(load(ctxptr, code.Length)) +} + +func storeIndent(ctxptr uintptr, code *encoder.Opcode, indent uintptr) { + store(ctxptr, code.Length, indent) +} + +func appendArrayElemIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + return appendIndent(ctx, b, code.Indent+1) +} + +func appendMapKeyIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte { + return appendIndent(ctx, b, code.Indent) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/vm.go b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/vm.go new file mode 100644 index 00000000..836c5c8a --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/encoder/vm_indent/vm.go @@ -0,0 +1,4859 @@ +// Code generated by internal/cmd/generator. DO NOT EDIT! +package vm_indent + +import ( + "math" + "reflect" + "sort" + "unsafe" + + "github.com/goccy/go-json/internal/encoder" + "github.com/goccy/go-json/internal/runtime" +) + +func Run(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) { + recursiveLevel := 0 + ptrOffset := uintptr(0) + ctxptr := ctx.Ptr() + var code *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + code = codeSet.EscapeKeyCode + } else { + code = codeSet.NoescapeKeyCode + } + + for { + switch code.Op { + default: + return nil, errUnimplementedOp(code.Op) + case encoder.OpPtr: + p := load(ctxptr, code.Idx) + code = code.Next + store(ctxptr, code.Idx, ptrToPtr(p)) + case encoder.OpIntPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInt: + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpUint: + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpIntString: + b = append(b, '"') + b = appendInt(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpUintString: + b = append(b, '"') + b = appendUint(ctx, b, load(ctxptr, code.Idx), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat32Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat32: + b = appendFloat32(ctx, b, ptrToFloat32(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpFloat64Ptr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + 
store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpFloat64: + v := ptrToFloat64(load(ctxptr, code.Idx)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStringPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpString: + b = appendString(ctx, b, ptrToString(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBoolPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBool: + b = appendBool(ctx, b, ptrToBool(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpBytesPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpBytes: + b = appendByteSlice(ctx, b, ptrToBytes(load(ctxptr, code.Idx))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpNumberPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpNumber: + bb, err := appendNumber(ctx, b, ptrToNumber(load(ctxptr, code.Idx))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpInterfacePtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpInterface: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + if recursiveLevel > encoder.StartDetectingCyclesAfter { + for _, seen := range ctx.SeenPtr { + if p == seen { + return nil, errUnsupportedValue(code, p) + } + } + } + ctx.SeenPtr = append(ctx.SeenPtr, p) + var ( + typ *runtime.Type + ifacePtr unsafe.Pointer + ) + up := ptrToUnsafePtr(p) + if code.Flags&encoder.NonEmptyInterfaceFlags != 0 { + iface := (*nonEmptyInterface)(up) + ifacePtr = iface.ptr + if iface.itab != nil { + typ = iface.itab.typ + } + } else { + iface := (*emptyInterface)(up) + ifacePtr = iface.ptr + typ = iface.typ + } + if ifacePtr == nil { + isDirectedNil := typ != nil && typ.Kind() == reflect.Struct && !runtime.IfaceIndir(typ) + if !isDirectedNil { + b = appendNullComma(ctx, b) + code = code.Next + break + } + } + ctx.KeepRefs = append(ctx.KeepRefs, up) + ifaceCodeSet, err := encoder.CompileToGetCodeSet(ctx, uintptr(unsafe.Pointer(typ))) + if err != nil { + return nil, err + } + + totalLength := uintptr(code.Length) + 3 + nextTotalLength := uintptr(ifaceCodeSet.CodeLength) + 3 + + var c *encoder.Opcode + if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 { + c = ifaceCodeSet.InterfaceEscapeKeyCode + } else { + c = ifaceCodeSet.InterfaceNoescapeKeyCode + } + curlen := uintptr(len(ctx.Ptrs)) + offsetNum := ptrOffset / uintptrSize + oldOffset := ptrOffset + ptrOffset += totalLength * uintptrSize + oldBaseIndent := ctx.BaseIndent + ctx.BaseIndent += code.Indent + + newLen := offsetNum + totalLength + nextTotalLength + if curlen < newLen { + ctx.Ptrs = append(ctx.Ptrs, make([]uintptr, newLen-curlen)...) 
+ } + ctxptr = ctx.Ptr() + ptrOffset // assign new ctxptr + + end := ifaceCodeSet.EndCode + store(ctxptr, c.Idx, uintptr(ifacePtr)) + store(ctxptr, end.Idx, oldOffset) + store(ctxptr, end.ElemIdx, uintptr(unsafe.Pointer(code.Next))) + storeIndent(ctxptr, end, uintptr(oldBaseIndent)) + code = c + recursiveLevel++ + case encoder.OpInterfaceEnd: + recursiveLevel-- + + // restore ctxptr + offset := load(ctxptr, code.Idx) + restoreIndent(ctx, code, ctxptr) + ctx.SeenPtr = ctx.SeenPtr[:len(ctx.SeenPtr)-1] + + codePtr := load(ctxptr, code.ElemIdx) + code = (*encoder.Opcode)(ptrToUnsafePtr(codePtr)) + ctxptr = ctx.Ptr() + offset + ptrOffset = offset + case encoder.OpMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToPtr(p)) + fallthrough + case encoder.OpMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + if (code.Flags&encoder.IsNilableTypeFlags) != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p) + } + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToPtr(p)) + fallthrough + case encoder.OpMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + b = append(b, `""`...) + b = appendComma(ctx, b) + code = code.Next + break + } + if (code.Flags&encoder.IsNilableTypeFlags) != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p) + } + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpSlicePtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpSlice: + p := load(ctxptr, code.Idx) + slice := ptrToSlice(p) + if p == 0 || slice.Data == nil { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.ElemIdx, 0) + store(ctxptr, code.Length, uintptr(slice.Len)) + store(ctxptr, code.Idx, uintptr(slice.Data)) + if slice.Len > 0 { + b = appendArrayHead(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, uintptr(slice.Data)) + } else { + b = appendEmptyArray(ctx, b) + code = code.End.Next + } + case encoder.OpSliceElem: + idx := load(ctxptr, code.ElemIdx) + length := load(ctxptr, code.Length) + idx++ + if idx < length { + b = appendArrayElemIndent(ctx, code, b) + store(ctxptr, code.ElemIdx, idx) + data := load(ctxptr, code.Idx) + size := uintptr(code.Size) + code = code.Next + store(ctxptr, code.Idx, data+idx*size) + } else { + b = appendArrayEnd(ctx, code, b) + code = code.End.Next + } + case encoder.OpArrayPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpArray: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + if code.Length > 0 { + b = appendArrayHead(ctx, code, b) + store(ctxptr, code.ElemIdx, 0) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + b = appendEmptyArray(ctx, b) + code = code.End.Next + } + case encoder.OpArrayElem: + idx := load(ctxptr, 
code.ElemIdx) + idx++ + if idx < uintptr(code.Length) { + b = appendArrayElemIndent(ctx, code, b) + store(ctxptr, code.ElemIdx, idx) + p := load(ctxptr, code.Idx) + size := uintptr(code.Size) + code = code.Next + store(ctxptr, code.Idx, p+idx*size) + } else { + b = appendArrayEnd(ctx, code, b) + code = code.End.Next + } + case encoder.OpMapPtr: + p := loadNPtr(ctxptr, code.Idx, code.PtrNum) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + store(ctxptr, code.Idx, p) + fallthrough + case encoder.OpMap: + p := load(ctxptr, code.Idx) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.End.Next + break + } + uptr := ptrToUnsafePtr(p) + mlen := maplen(uptr) + if mlen <= 0 { + b = appendEmptyObject(ctx, b) + code = code.End.Next + break + } + b = appendStructHead(ctx, b) + unorderedMap := (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 + mapCtx := encoder.NewMapContext(mlen, unorderedMap) + mapiterinit(code.Type, uptr, &mapCtx.Iter) + store(ctxptr, code.Idx, uintptr(unsafe.Pointer(mapCtx))) + ctx.KeepRefs = append(ctx.KeepRefs, unsafe.Pointer(mapCtx)) + if unorderedMap { + b = appendMapKeyIndent(ctx, code.Next, b) + } else { + mapCtx.Start = len(b) + mapCtx.First = len(b) + } + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + case encoder.OpMapKey: + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + idx := mapCtx.Idx + idx++ + if (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 { + if idx < mapCtx.Len { + b = appendMapKeyIndent(ctx, code, b) + mapCtx.Idx = int(idx) + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + } else { + b = appendObjectEnd(ctx, code, b) + encoder.ReleaseMapContext(mapCtx) + code = code.End.Next + } + } else { + mapCtx.Slice.Items[mapCtx.Idx].Value = b[mapCtx.Start:len(b)] + if idx < mapCtx.Len { + mapCtx.Idx = int(idx) + mapCtx.Start = len(b) + key := mapiterkey(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(key)) + code = code.Next + } else { + code = code.End + } + } + case encoder.OpMapValue: + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + if (ctx.Option.Flag & encoder.UnorderedMapOption) != 0 { + b = appendColon(ctx, b) + } else { + mapCtx.Slice.Items[mapCtx.Idx].Key = b[mapCtx.Start:len(b)] + mapCtx.Start = len(b) + } + value := mapitervalue(&mapCtx.Iter) + store(ctxptr, code.Next.Idx, uintptr(value)) + mapiternext(&mapCtx.Iter) + code = code.Next + case encoder.OpMapEnd: + // this operation only used by sorted map. + mapCtx := (*encoder.MapContext)(ptrToUnsafePtr(load(ctxptr, code.Idx))) + sort.Sort(mapCtx.Slice) + buf := mapCtx.Buf + for _, item := range mapCtx.Slice.Items { + buf = appendMapKeyValue(ctx, code, buf, item.Key, item.Value) + } + buf = appendMapEnd(ctx, code, buf) + b = b[:mapCtx.First] + b = append(b, buf...) 
+ mapCtx.Buf = buf + encoder.ReleaseMapContext(mapCtx) + code = code.Next + case encoder.OpRecursivePtr: + p := load(ctxptr, code.Idx) + if p == 0 { + code = code.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpRecursive: + ptr := load(ctxptr, code.Idx) + if ptr != 0 { + if recursiveLevel > encoder.StartDetectingCyclesAfter { + for _, seen := range ctx.SeenPtr { + if ptr == seen { + return nil, errUnsupportedValue(code, ptr) + } + } + } + } + ctx.SeenPtr = append(ctx.SeenPtr, ptr) + c := code.Jmp.Code + curlen := uintptr(len(ctx.Ptrs)) + offsetNum := ptrOffset / uintptrSize + oldOffset := ptrOffset + ptrOffset += code.Jmp.CurLen * uintptrSize + oldBaseIndent := ctx.BaseIndent + indentDiffFromTop := c.Indent - 1 + ctx.BaseIndent += code.Indent - indentDiffFromTop + + newLen := offsetNum + code.Jmp.CurLen + code.Jmp.NextLen + if curlen < newLen { + ctx.Ptrs = append(ctx.Ptrs, make([]uintptr, newLen-curlen)...) + } + ctxptr = ctx.Ptr() + ptrOffset // assign new ctxptr + + store(ctxptr, c.Idx, ptr) + store(ctxptr, c.End.Next.Idx, oldOffset) + store(ctxptr, c.End.Next.ElemIdx, uintptr(unsafe.Pointer(code.Next))) + storeIndent(ctxptr, c.End.Next, uintptr(oldBaseIndent)) + code = c + recursiveLevel++ + case encoder.OpRecursiveEnd: + recursiveLevel-- + + // restore ctxptr + restoreIndent(ctx, code, ctxptr) + offset := load(ctxptr, code.Idx) + ctx.SeenPtr = ctx.SeenPtr[:len(ctx.SeenPtr)-1] + + codePtr := load(ctxptr, code.ElemIdx) + code = (*encoder.Opcode)(ptrToUnsafePtr(codePtr)) + ctxptr = ctx.Ptr() + offset + ptrOffset = offset + case encoder.OpStructPtrHead: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHead: + p := load(ctxptr, code.Idx) + if p == 0 && ((code.Flags&encoder.IndirectFlags) != 0 || code.Next.Op == encoder.OpStructEnd) { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if len(code.Key) > 0 { + if (code.Flags&encoder.IsTaggedKeyFlags) != 0 || code.Flags&encoder.AnonymousKeyFlags == 0 { + b = appendStructKey(ctx, code, b) + } + } + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructPtrHeadOmitEmpty: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmpty: + p := load(ctxptr, code.Idx) + if p == 0 && ((code.Flags&encoder.IndirectFlags) != 0 || code.Next.Op == encoder.OpStructEnd) { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if p == 0 || (ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0) { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadInt: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } 
+ code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadInt: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyInt: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyInt: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadIntString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadIntString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyIntString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + u64 := ptrToUint64(p, code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { 
+ if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadUint: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadUint: + p := load(ctxptr, code.Idx) + if p == 0 { + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUint: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyUint: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadUintString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadUintString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyUintString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags 
== 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat32: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat32: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat32(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat32String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat32String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat32(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + 
b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat64: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat64: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, 
b) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat64String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadFloat64String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64String: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToFloat64(p + uintptr(code.Offset)) + if v == 0 { + code = code.NextField + } else { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if 
(code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, 
b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNull(ctx, b) + b = appendComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToString(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadStringString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadStringString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p+uintptr(code.Offset))))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyStringString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyStringString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToString(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + 
code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadBool: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadBool: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags 
== 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBool: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyBool: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } else { + code = code.NextField + } + case encoder.OpStructPtrHeadBoolString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadBoolString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBoolString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + } else { + code = code.NextField + } + case encoder.OpStructPtrHeadBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = 
appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadBytes: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadBytes: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code 
= code.Next + case encoder.OpStructPtrHeadOmitEmptyBytes: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyBytes: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadNumber: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadNumber: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumber: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyNumber: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToNumber(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + } + case encoder.OpStructPtrHeadNumberString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadNumberString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumberString: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + v := ptrToNumber(p + uintptr(code.Offset)) + if v == "" { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 { 
+ if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructPtrHeadNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructPtrHeadArray, encoder.OpStructPtrHeadSlice: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadArray, encoder.OpStructHeadSlice: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case 
encoder.OpStructPtrHeadOmitEmptyArray: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyArray: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructPtrHeadOmitEmptySlice: + if (code.Flags & encoder.IndirectFlags) != 0 { + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptySlice: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + slice := ptrToSlice(p) + if slice.Len == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadArrayPtr, encoder.OpStructPtrHeadSlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadArrayPtr, encoder.OpStructHeadSlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNullComma(ctx, b) + code = code.NextField + } else { + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadOmitEmptyArrayPtr, encoder.OpStructPtrHeadOmitEmptySlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyArrayPtr, encoder.OpStructHeadOmitEmptySlicePtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadMap: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = 
appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMap: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if p != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p + uintptr(code.Offset)) + } + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructPtrHeadOmitEmptyMap: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMap: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p != 0 && (code.Flags&encoder.IndirectFlags) != 0 { + p = ptrToPtr(p + uintptr(code.Offset)) + } + if maplen(ptrToUnsafePtr(p)) == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.NextField + break + } + p = ptrToPtr(p + uintptr(code.Offset)) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.NextField + } else { + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadOmitEmptyMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMapPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + break + } + p = ptrToPtr(p + uintptr(code.Offset)) + if p == 0 { + code = code.NextField + } else { + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructPtrHeadMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = 
appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadMarshalJSON { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadOmitEmptyMarshalJSON { + p = ptrToPtr(p) + } + } + iface := ptrToInterface(code, p) + if (code.Flags&encoder.NilCheckFlags) != 0 && encoder.IsNilForMarshaler(iface) { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, iface) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalJSONPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalJSONPtr: + p 
:= load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadMarshalText { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + } + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + if (code.Flags&encoder.IndirectFlags) != 0 || code.Op == encoder.OpStructPtrHeadOmitEmptyMarshalText { + p = ptrToPtr(p) + } + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructPtrHeadMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if 
code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + b = appendStructKey(ctx, code, b) + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructPtrHeadOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + store(ctxptr, code.Idx, ptrToNPtr(p, code.PtrNum)) + fallthrough + case encoder.OpStructHeadOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + if p == 0 && (code.Flags&encoder.IndirectFlags) != 0 { + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendNullComma(ctx, b) + } + code = code.End.Next + break + } + if (code.Flags & encoder.IndirectFlags) != 0 { + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + } + if code.Flags&encoder.AnonymousHeadFlags == 0 { + b = appendStructHead(ctx, b) + } + if p == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + b = appendComma(ctx, b) + code = code.Next + } + case encoder.OpStructField: + if code.Flags&encoder.IsTaggedKeyFlags != 0 || code.Flags&encoder.AnonymousKeyFlags == 0 { + b = appendStructKey(ctx, code, b) + } + p := load(ctxptr, code.Idx) + uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmpty: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldInt: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyInt: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if 
p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUint: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUint: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldUintPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = 
appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32String: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNullComma(ctx, b) + code = code.Next + break + } + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, 
errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringString: + p := load(ctxptr, code.Idx) + s := ptrToString(p + uintptr(code.Offset)) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, s))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendComma(ctx, b) 
+ } + code = code.Next + case encoder.OpStructFieldBool: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBool: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBytes: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBytes: + p := load(ctxptr, code.Idx) + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) > 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldNumber: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case 
encoder.OpStructFieldOmitEmptyNumber: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldNumberString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldNumberPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + b = appendStructKey(ctx, code, b) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendComma(ctx, b) + } + code = code.Next + case encoder.OpStructFieldMarshalJSON: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalJSON: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + break + } + iface := ptrToInterface(code, p) + if (code.Flags&encoder.NilCheckFlags) != 0 && encoder.IsNilForMarshaler(iface) { + code = code.NextField + break + } + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, iface) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructFieldMarshalJSONPtr: + p := load(ctxptr, code.Idx) + b = 
appendStructKey(ctx, code, b) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalJSONPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalJSON(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldMarshalText: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalText: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if (code.Flags & encoder.IsNilableTypeFlags) != 0 { + p = ptrToPtr(p) + } + if p == 0 && (code.Flags&encoder.NilCheckFlags) != 0 { + code = code.NextField + break + } + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + code = code.Next + case encoder.OpStructFieldMarshalTextPtr: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendComma(ctx, b) + code = code.Next + case encoder.OpStructFieldOmitEmptyMarshalTextPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendMarshalText(ctx, code, b, ptrToInterface(code, p)) + if err != nil { + return nil, err + } + b = appendComma(ctx, bb) + } + code = code.Next + case encoder.OpStructFieldArray: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyArray: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldArrayPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyArrayPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldSlice: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptySlice: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + slice := ptrToSlice(p) + if slice.Len == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code 
= code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldSlicePtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptySlicePtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldMap: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyMap: + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p == 0 || maplen(ptrToUnsafePtr(p)) == 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructFieldMapPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyMapPtr: + p := load(ctxptr, code.Idx) + p = ptrToPtr(p + uintptr(code.Offset)) + if p != 0 { + p = ptrToNPtr(p, code.PtrNum) + } + if p != 0 { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } else { + code = code.NextField + } + case encoder.OpStructFieldStruct: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + code = code.Next + store(ctxptr, code.Idx, p) + case encoder.OpStructFieldOmitEmptyStruct: + p := load(ctxptr, code.Idx) + p += uintptr(code.Offset) + if ptrToPtr(p) == 0 && (code.Flags&encoder.IsNextOpPtrTypeFlags) != 0 { + code = code.NextField + } else { + b = appendStructKey(ctx, code, b) + code = code.Next + store(ctxptr, code.Idx, p) + } + case encoder.OpStructEnd: + b = appendStructEndSkipLast(ctx, code, b) + code = code.Next + case encoder.OpStructEndInt: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyInt: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p 
== 0 { + b = appendNull(ctx, b) + } else { + b = appendInt(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendInt(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndIntPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyIntPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendInt(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUint: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUint: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintString: + p := load(ctxptr, code.Idx) + u64 := ptrToUint64(p+uintptr(code.Offset), code.NumBitSize) + v := u64 & ((1 << code.NumBitSize) - 1) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p+uintptr(code.Offset), code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendUint(ctx, b, p, code) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendUint(ctx, b, p, code) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndUintPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyUintPtrString: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendUint(ctx, b, p, code) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32String: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32String: + p := load(ctxptr, code.Idx) + v := ptrToFloat32(p + uintptr(code.Offset)) + if v != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendFloat32(ctx, b, ptrToFloat32(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat32PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat32PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat32(ctx, b, ptrToFloat32(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = 
code.Next + case encoder.OpStructEndFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64String: + p := load(ctxptr, code.Idx) + v := ptrToFloat64(p + uintptr(code.Offset)) + if v != 0 { + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64Ptr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + b = appendStructEnd(ctx, code, b) + code = code.Next + break + } + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64Ptr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndFloat64PtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = appendFloat64(ctx, b, v) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyFloat64PtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + v := ptrToFloat64(p) + if math.IsInf(v, 0) || math.IsNaN(v) { + return nil, errUnsupportedFloat(v) + } + b = append(b, '"') + b = appendFloat64(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + s := ptrToString(p + uintptr(code.Offset)) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, s))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringString: + p := load(ctxptr, code.Idx) + v := ptrToString(p + 
uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, v))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, ptrToString(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, ptrToString(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndStringPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyStringPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendString(ctx, b, string(appendString(ctx, []byte{}, ptrToString(p)))) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBool: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBool: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p+uintptr(code.Offset))) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolString: + p := load(ctxptr, code.Idx) + v := ptrToBool(p + uintptr(code.Offset)) + if v { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, v) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendBool(ctx, b, ptrToBool(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendBool(ctx, b, ptrToBool(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBoolPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 
0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBoolPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + b = appendBool(ctx, b, ptrToBool(p)) + b = append(b, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytes: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p+uintptr(code.Offset))) + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytes: + p := load(ctxptr, code.Idx) + v := ptrToBytes(p + uintptr(code.Offset)) + if len(v) > 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, v) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndBytesPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = appendByteSlice(ctx, b, ptrToBytes(p)) + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyBytesPtr: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = appendByteSlice(ctx, b, ptrToBytes(p)) + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumber: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + code = code.Next + case encoder.OpStructEndOmitEmptyNumber: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberString: + p := load(ctxptr, code.Idx) + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p+uintptr(code.Offset))) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberString: + p := load(ctxptr, code.Idx) + v := ptrToNumber(p + uintptr(code.Offset)) + if v != "" { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, v) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtr: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = bb + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtr: + p := load(ctxptr, code.Idx) + p = 
ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = appendStructEnd(ctx, code, bb) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpStructEndNumberPtrString: + b = appendStructKey(ctx, code, b) + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p == 0 { + b = appendNull(ctx, b) + } else { + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + } + b = appendStructEnd(ctx, code, b) + code = code.Next + case encoder.OpStructEndOmitEmptyNumberPtrString: + p := load(ctxptr, code.Idx) + p = ptrToNPtr(p+uintptr(code.Offset), code.PtrNum) + if p != 0 { + b = appendStructKey(ctx, code, b) + b = append(b, '"') + bb, err := appendNumber(ctx, b, ptrToNumber(p)) + if err != nil { + return nil, err + } + b = append(bb, '"') + b = appendStructEnd(ctx, code, b) + } else { + b = appendStructEndSkipLast(ctx, code, b) + } + code = code.Next + case encoder.OpEnd: + goto END + } + } +END: + return b, nil +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/errors/error.go b/index/server/vendor/github.com/goccy/go-json/internal/errors/error.go new file mode 100644 index 00000000..d58e39f4 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/errors/error.go @@ -0,0 +1,164 @@ +package errors + +import ( + "fmt" + "reflect" + "strconv" +) + +type InvalidUTF8Error struct { + S string // the whole string value that caused the error +} + +func (e *InvalidUTF8Error) Error() string { + return fmt.Sprintf("json: invalid UTF-8 in string: %s", strconv.Quote(e.S)) +} + +type InvalidUnmarshalError struct { + Type reflect.Type +} + +func (e *InvalidUnmarshalError) Error() string { + if e.Type == nil { + return "json: Unmarshal(nil)" + } + + if e.Type.Kind() != reflect.Ptr { + return fmt.Sprintf("json: Unmarshal(non-pointer %s)", e.Type) + } + return fmt.Sprintf("json: Unmarshal(nil %s)", e.Type) +} + +// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method. +type MarshalerError struct { + Type reflect.Type + Err error + sourceFunc string +} + +func (e *MarshalerError) Error() string { + srcFunc := e.sourceFunc + if srcFunc == "" { + srcFunc = "MarshalJSON" + } + return fmt.Sprintf("json: error calling %s for type %s: %s", srcFunc, e.Type, e.Err.Error()) +} + +// Unwrap returns the underlying error. +func (e *MarshalerError) Unwrap() error { return e.Err } + +// A SyntaxError is a description of a JSON syntax error. +type SyntaxError struct { + msg string // description of error + Offset int64 // error occurred after reading Offset bytes +} + +func (e *SyntaxError) Error() string { return e.msg } + +// An UnmarshalFieldError describes a JSON object key that +// led to an unexported (and therefore unwritable) struct field. +// +// Deprecated: No longer used; kept for compatibility. +type UnmarshalFieldError struct { + Key string + Type reflect.Type + Field reflect.StructField +} + +func (e *UnmarshalFieldError) Error() string { + return fmt.Sprintf("json: cannot unmarshal object key %s into unexported field %s of type %s", + strconv.Quote(e.Key), e.Field.Name, e.Type.String(), + ) +} + +// An UnmarshalTypeError describes a JSON value that was +// not appropriate for a value of a specific Go type. 
+type UnmarshalTypeError struct { + Value string // description of JSON value - "bool", "array", "number -5" + Type reflect.Type // type of Go value it could not be assigned to + Offset int64 // error occurred after reading Offset bytes + Struct string // name of the struct type containing the field + Field string // the full path from root node to the field +} + +func (e *UnmarshalTypeError) Error() string { + if e.Struct != "" || e.Field != "" { + return fmt.Sprintf("json: cannot unmarshal %s into Go struct field %s.%s of type %s", + e.Value, e.Struct, e.Field, e.Type, + ) + } + return fmt.Sprintf("json: cannot unmarshal %s into Go value of type %s", e.Value, e.Type) +} + +// An UnsupportedTypeError is returned by Marshal when attempting +// to encode an unsupported value type. +type UnsupportedTypeError struct { + Type reflect.Type +} + +func (e *UnsupportedTypeError) Error() string { + return fmt.Sprintf("json: unsupported type: %s", e.Type) +} + +type UnsupportedValueError struct { + Value reflect.Value + Str string +} + +func (e *UnsupportedValueError) Error() string { + return fmt.Sprintf("json: unsupported value: %s", e.Str) +} + +func ErrSyntax(msg string, offset int64) *SyntaxError { + return &SyntaxError{msg: msg, Offset: offset} +} + +func ErrMarshaler(typ reflect.Type, err error, msg string) *MarshalerError { + return &MarshalerError{ + Type: typ, + Err: err, + sourceFunc: msg, + } +} + +func ErrExceededMaxDepth(c byte, cursor int64) *SyntaxError { + return &SyntaxError{ + msg: fmt.Sprintf(`invalid character "%c" exceeded max depth`, c), + Offset: cursor, + } +} + +func ErrNotAtBeginningOfValue(cursor int64) *SyntaxError { + return &SyntaxError{msg: "not at beginning of value", Offset: cursor} +} + +func ErrUnexpectedEndOfJSON(msg string, cursor int64) *SyntaxError { + return &SyntaxError{ + msg: fmt.Sprintf("json: %s unexpected end of JSON input", msg), + Offset: cursor, + } +} + +func ErrExpected(msg string, cursor int64) *SyntaxError { + return &SyntaxError{msg: fmt.Sprintf("expected %s", msg), Offset: cursor} +} + +func ErrInvalidCharacter(c byte, context string, cursor int64) *SyntaxError { + if c == 0 { + return &SyntaxError{ + msg: fmt.Sprintf("json: invalid character as %s", context), + Offset: cursor, + } + } + return &SyntaxError{ + msg: fmt.Sprintf("json: invalid character %c as %s", c, context), + Offset: cursor, + } +} + +func ErrInvalidBeginningOfValue(c byte, cursor int64) *SyntaxError { + return &SyntaxError{ + msg: fmt.Sprintf("invalid character '%c' looking for beginning of value", c), + Offset: cursor, + } +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/runtime/rtype.go b/index/server/vendor/github.com/goccy/go-json/internal/runtime/rtype.go new file mode 100644 index 00000000..4db10deb --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/runtime/rtype.go @@ -0,0 +1,263 @@ +package runtime + +import ( + "reflect" + "unsafe" +) + +// Type representing reflect.rtype for noescape trick +type Type struct{} + +//go:linkname rtype_Align reflect.(*rtype).Align +//go:noescape +func rtype_Align(*Type) int + +func (t *Type) Align() int { + return rtype_Align(t) +} + +//go:linkname rtype_FieldAlign reflect.(*rtype).FieldAlign +//go:noescape +func rtype_FieldAlign(*Type) int + +func (t *Type) FieldAlign() int { + return rtype_FieldAlign(t) +} + +//go:linkname rtype_Method reflect.(*rtype).Method +//go:noescape +func rtype_Method(*Type, int) reflect.Method + +func (t *Type) Method(a0 int) reflect.Method { 
+ return rtype_Method(t, a0) +} + +//go:linkname rtype_MethodByName reflect.(*rtype).MethodByName +//go:noescape +func rtype_MethodByName(*Type, string) (reflect.Method, bool) + +func (t *Type) MethodByName(a0 string) (reflect.Method, bool) { + return rtype_MethodByName(t, a0) +} + +//go:linkname rtype_NumMethod reflect.(*rtype).NumMethod +//go:noescape +func rtype_NumMethod(*Type) int + +func (t *Type) NumMethod() int { + return rtype_NumMethod(t) +} + +//go:linkname rtype_Name reflect.(*rtype).Name +//go:noescape +func rtype_Name(*Type) string + +func (t *Type) Name() string { + return rtype_Name(t) +} + +//go:linkname rtype_PkgPath reflect.(*rtype).PkgPath +//go:noescape +func rtype_PkgPath(*Type) string + +func (t *Type) PkgPath() string { + return rtype_PkgPath(t) +} + +//go:linkname rtype_Size reflect.(*rtype).Size +//go:noescape +func rtype_Size(*Type) uintptr + +func (t *Type) Size() uintptr { + return rtype_Size(t) +} + +//go:linkname rtype_String reflect.(*rtype).String +//go:noescape +func rtype_String(*Type) string + +func (t *Type) String() string { + return rtype_String(t) +} + +//go:linkname rtype_Kind reflect.(*rtype).Kind +//go:noescape +func rtype_Kind(*Type) reflect.Kind + +func (t *Type) Kind() reflect.Kind { + return rtype_Kind(t) +} + +//go:linkname rtype_Implements reflect.(*rtype).Implements +//go:noescape +func rtype_Implements(*Type, reflect.Type) bool + +func (t *Type) Implements(u reflect.Type) bool { + return rtype_Implements(t, u) +} + +//go:linkname rtype_AssignableTo reflect.(*rtype).AssignableTo +//go:noescape +func rtype_AssignableTo(*Type, reflect.Type) bool + +func (t *Type) AssignableTo(u reflect.Type) bool { + return rtype_AssignableTo(t, u) +} + +//go:linkname rtype_ConvertibleTo reflect.(*rtype).ConvertibleTo +//go:noescape +func rtype_ConvertibleTo(*Type, reflect.Type) bool + +func (t *Type) ConvertibleTo(u reflect.Type) bool { + return rtype_ConvertibleTo(t, u) +} + +//go:linkname rtype_Comparable reflect.(*rtype).Comparable +//go:noescape +func rtype_Comparable(*Type) bool + +func (t *Type) Comparable() bool { + return rtype_Comparable(t) +} + +//go:linkname rtype_Bits reflect.(*rtype).Bits +//go:noescape +func rtype_Bits(*Type) int + +func (t *Type) Bits() int { + return rtype_Bits(t) +} + +//go:linkname rtype_ChanDir reflect.(*rtype).ChanDir +//go:noescape +func rtype_ChanDir(*Type) reflect.ChanDir + +func (t *Type) ChanDir() reflect.ChanDir { + return rtype_ChanDir(t) +} + +//go:linkname rtype_IsVariadic reflect.(*rtype).IsVariadic +//go:noescape +func rtype_IsVariadic(*Type) bool + +func (t *Type) IsVariadic() bool { + return rtype_IsVariadic(t) +} + +//go:linkname rtype_Elem reflect.(*rtype).Elem +//go:noescape +func rtype_Elem(*Type) reflect.Type + +func (t *Type) Elem() *Type { + return Type2RType(rtype_Elem(t)) +} + +//go:linkname rtype_Field reflect.(*rtype).Field +//go:noescape +func rtype_Field(*Type, int) reflect.StructField + +func (t *Type) Field(i int) reflect.StructField { + return rtype_Field(t, i) +} + +//go:linkname rtype_FieldByIndex reflect.(*rtype).FieldByIndex +//go:noescape +func rtype_FieldByIndex(*Type, []int) reflect.StructField + +func (t *Type) FieldByIndex(index []int) reflect.StructField { + return rtype_FieldByIndex(t, index) +} + +//go:linkname rtype_FieldByName reflect.(*rtype).FieldByName +//go:noescape +func rtype_FieldByName(*Type, string) (reflect.StructField, bool) + +func (t *Type) FieldByName(name string) (reflect.StructField, bool) { + return rtype_FieldByName(t, name) +} + +//go:linkname 
rtype_FieldByNameFunc reflect.(*rtype).FieldByNameFunc +//go:noescape +func rtype_FieldByNameFunc(*Type, func(string) bool) (reflect.StructField, bool) + +func (t *Type) FieldByNameFunc(match func(string) bool) (reflect.StructField, bool) { + return rtype_FieldByNameFunc(t, match) +} + +//go:linkname rtype_In reflect.(*rtype).In +//go:noescape +func rtype_In(*Type, int) reflect.Type + +func (t *Type) In(i int) reflect.Type { + return rtype_In(t, i) +} + +//go:linkname rtype_Key reflect.(*rtype).Key +//go:noescape +func rtype_Key(*Type) reflect.Type + +func (t *Type) Key() *Type { + return Type2RType(rtype_Key(t)) +} + +//go:linkname rtype_Len reflect.(*rtype).Len +//go:noescape +func rtype_Len(*Type) int + +func (t *Type) Len() int { + return rtype_Len(t) +} + +//go:linkname rtype_NumField reflect.(*rtype).NumField +//go:noescape +func rtype_NumField(*Type) int + +func (t *Type) NumField() int { + return rtype_NumField(t) +} + +//go:linkname rtype_NumIn reflect.(*rtype).NumIn +//go:noescape +func rtype_NumIn(*Type) int + +func (t *Type) NumIn() int { + return rtype_NumIn(t) +} + +//go:linkname rtype_NumOut reflect.(*rtype).NumOut +//go:noescape +func rtype_NumOut(*Type) int + +func (t *Type) NumOut() int { + return rtype_NumOut(t) +} + +//go:linkname rtype_Out reflect.(*rtype).Out +//go:noescape +func rtype_Out(*Type, int) reflect.Type + +//go:linkname PtrTo reflect.(*rtype).ptrTo +//go:noescape +func PtrTo(*Type) *Type + +func (t *Type) Out(i int) reflect.Type { + return rtype_Out(t, i) +} + +//go:linkname IfaceIndir reflect.ifaceIndir +//go:noescape +func IfaceIndir(*Type) bool + +//go:linkname RType2Type reflect.toType +//go:noescape +func RType2Type(t *Type) reflect.Type + +//go:nolint structcheck +type emptyInterface struct { + _ *Type + ptr unsafe.Pointer +} + +func Type2RType(t reflect.Type) *Type { + return (*Type)(((*emptyInterface)(unsafe.Pointer(&t))).ptr) +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/runtime/struct_field.go b/index/server/vendor/github.com/goccy/go-json/internal/runtime/struct_field.go new file mode 100644 index 00000000..baab0c59 --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/runtime/struct_field.go @@ -0,0 +1,91 @@ +package runtime + +import ( + "reflect" + "strings" + "unicode" +) + +func getTag(field reflect.StructField) string { + return field.Tag.Get("json") +} + +func IsIgnoredStructField(field reflect.StructField) bool { + if field.PkgPath != "" { + if field.Anonymous { + t := field.Type + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + if t.Kind() != reflect.Struct { + return true + } + } else { + // private field + return true + } + } + tag := getTag(field) + return tag == "-" +} + +type StructTag struct { + Key string + IsTaggedKey bool + IsOmitEmpty bool + IsString bool + Field reflect.StructField +} + +type StructTags []*StructTag + +func (t StructTags) ExistsKey(key string) bool { + for _, tt := range t { + if tt.Key == key { + return true + } + } + return false +} + +func isValidTag(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. 
+ case !unicode.IsLetter(c) && !unicode.IsDigit(c): + return false + } + } + return true +} + +func StructTagFromField(field reflect.StructField) *StructTag { + keyName := field.Name + tag := getTag(field) + st := &StructTag{Field: field} + opts := strings.Split(tag, ",") + if len(opts) > 0 { + if opts[0] != "" && isValidTag(opts[0]) { + keyName = opts[0] + st.IsTaggedKey = true + } + } + st.Key = keyName + if len(opts) > 1 { + for _, opt := range opts[1:] { + switch opt { + case "omitempty": + st.IsOmitEmpty = true + case "string": + st.IsString = true + } + } + } + return st +} diff --git a/index/server/vendor/github.com/goccy/go-json/internal/runtime/type.go b/index/server/vendor/github.com/goccy/go-json/internal/runtime/type.go new file mode 100644 index 00000000..0167cd2c --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/internal/runtime/type.go @@ -0,0 +1,100 @@ +package runtime + +import ( + "reflect" + "unsafe" +) + +type SliceHeader struct { + Data unsafe.Pointer + Len int + Cap int +} + +const ( + maxAcceptableTypeAddrRange = 1024 * 1024 * 2 // 2 Mib +) + +type TypeAddr struct { + BaseTypeAddr uintptr + MaxTypeAddr uintptr + AddrRange uintptr + AddrShift uintptr +} + +var ( + typeAddr *TypeAddr + alreadyAnalyzed bool +) + +//go:linkname typelinks reflect.typelinks +func typelinks() ([]unsafe.Pointer, [][]int32) + +//go:linkname rtypeOff reflect.rtypeOff +func rtypeOff(unsafe.Pointer, int32) unsafe.Pointer + +func AnalyzeTypeAddr() *TypeAddr { + defer func() { + alreadyAnalyzed = true + }() + if alreadyAnalyzed { + return typeAddr + } + sections, offsets := typelinks() + if len(sections) != 1 { + return nil + } + if len(offsets) != 1 { + return nil + } + section := sections[0] + offset := offsets[0] + var ( + min uintptr = uintptr(^uint(0)) + max uintptr = 0 + isAligned64 = true + isAligned32 = true + ) + for i := 0; i < len(offset); i++ { + typ := (*Type)(rtypeOff(section, offset[i])) + addr := uintptr(unsafe.Pointer(typ)) + if min > addr { + min = addr + } + if max < addr { + max = addr + } + if typ.Kind() == reflect.Ptr { + addr = uintptr(unsafe.Pointer(typ.Elem())) + if min > addr { + min = addr + } + if max < addr { + max = addr + } + } + isAligned64 = isAligned64 && (addr-min)&63 == 0 + isAligned32 = isAligned32 && (addr-min)&31 == 0 + } + addrRange := max - min + if addrRange == 0 { + return nil + } + var addrShift uintptr + if isAligned64 { + addrShift = 6 + } else if isAligned32 { + addrShift = 5 + } + cacheSize := addrRange >> addrShift + if cacheSize > maxAcceptableTypeAddrRange { + return nil + } + typeAddr = &TypeAddr{ + BaseTypeAddr: min, + MaxTypeAddr: max, + AddrRange: addrRange, + AddrShift: addrShift, + } + return typeAddr +} diff --git a/index/server/vendor/github.com/goccy/go-json/json.go b/index/server/vendor/github.com/goccy/go-json/json.go new file mode 100644 index 00000000..413cb20b --- /dev/null +++ b/index/server/vendor/github.com/goccy/go-json/json.go @@ -0,0 +1,371 @@ +package json + +import ( + "bytes" + "context" + "encoding/json" + + "github.com/goccy/go-json/internal/encoder" +) + +// Marshaler is the interface implemented by types that +// can marshal themselves into valid JSON. +type Marshaler interface { + MarshalJSON() ([]byte, error) +} + +// MarshalerContext is the interface implemented by types that +// can marshal themselves into valid JSON with context.Context. 
+type MarshalerContext interface { + MarshalJSON(context.Context) ([]byte, error) +} + +// Unmarshaler is the interface implemented by types +// that can unmarshal a JSON description of themselves. +// The input can be assumed to be a valid encoding of +// a JSON value. UnmarshalJSON must copy the JSON data +// if it wishes to retain the data after returning. +// +// By convention, to approximate the behavior of Unmarshal itself, +// Unmarshalers implement UnmarshalJSON([]byte("null")) as a no-op. +type Unmarshaler interface { + UnmarshalJSON([]byte) error +} + +// UnmarshalerContext is the interface implemented by types +// that can unmarshal with context.Context a JSON description of themselves. +type UnmarshalerContext interface { + UnmarshalJSON(context.Context, []byte) error +} + +// Marshal returns the JSON encoding of v. +// +// Marshal traverses the value v recursively. +// If an encountered value implements the Marshaler interface +// and is not a nil pointer, Marshal calls its MarshalJSON method +// to produce JSON. If no MarshalJSON method is present but the +// value implements encoding.TextMarshaler instead, Marshal calls +// its MarshalText method and encodes the result as a JSON string. +// The nil pointer exception is not strictly necessary +// but mimics a similar, necessary exception in the behavior of +// UnmarshalJSON. +// +// Otherwise, Marshal uses the following type-dependent default encodings: +// +// Boolean values encode as JSON booleans. +// +// Floating point, integer, and Number values encode as JSON numbers. +// +// String values encode as JSON strings coerced to valid UTF-8, +// replacing invalid bytes with the Unicode replacement rune. +// The angle brackets "<" and ">" are escaped to "\u003c" and "\u003e" +// to keep some browsers from misinterpreting JSON output as HTML. +// Ampersand "&" is also escaped to "\u0026" for the same reason. +// This escaping can be disabled using an Encoder that had SetEscapeHTML(false) +// called on it. +// +// Array and slice values encode as JSON arrays, except that +// []byte encodes as a base64-encoded string, and a nil slice +// encodes as the null JSON value. +// +// Struct values encode as JSON objects. +// Each exported struct field becomes a member of the object, using the +// field name as the object key, unless the field is omitted for one of the +// reasons given below. +// +// The encoding of each struct field can be customized by the format string +// stored under the "json" key in the struct field's tag. +// The format string gives the name of the field, possibly followed by a +// comma-separated list of options. The name may be empty in order to +// specify options without overriding the default field name. +// +// The "omitempty" option specifies that the field should be omitted +// from the encoding if the field has an empty value, defined as +// false, 0, a nil pointer, a nil interface value, and any empty array, +// slice, map, or string. +// +// As a special case, if the field tag is "-", the field is always omitted. +// Note that a field with name "-" can still be generated using the tag "-,". +// +// Examples of struct field tags and their meanings: +// +// // Field appears in JSON as key "myName". +// Field int `json:"myName"` +// +// // Field appears in JSON as key "myName" and +// // the field is omitted from the object if its value is empty, +// // as defined above. 
+// Field int `json:"myName,omitempty"` +// +// // Field appears in JSON as key "Field" (the default), but +// // the field is skipped if empty. +// // Note the leading comma. +// Field int `json:",omitempty"` +// +// // Field is ignored by this package. +// Field int `json:"-"` +// +// // Field appears in JSON as key "-". +// Field int `json:"-,"` +// +// The "string" option signals that a field is stored as JSON inside a +// JSON-encoded string. It applies only to fields of string, floating point, +// integer, or boolean types. This extra level of encoding is sometimes used +// when communicating with JavaScript programs: +// +// Int64String int64 `json:",string"` +// +// The key name will be used if it's a non-empty string consisting of +// only Unicode letters, digits, and ASCII punctuation except quotation +// marks, backslash, and comma. +// +// Anonymous struct fields are usually marshaled as if their inner exported fields +// were fields in the outer struct, subject to the usual Go visibility rules amended +// as described in the next paragraph. +// An anonymous struct field with a name given in its JSON tag is treated as +// having that name, rather than being anonymous. +// An anonymous struct field of interface type is treated the same as having +// that type as its name, rather than being anonymous. +// +// The Go visibility rules for struct fields are amended for JSON when +// deciding which field to marshal or unmarshal. If there are +// multiple fields at the same level, and that level is the least +// nested (and would therefore be the nesting level selected by the +// usual Go rules), the following extra rules apply: +// +// 1) Of those fields, if any are JSON-tagged, only tagged fields are considered, +// even if there are multiple untagged fields that would otherwise conflict. +// +// 2) If there is exactly one field (tagged or not according to the first rule), that is selected. +// +// 3) Otherwise there are multiple fields, and all are ignored; no error occurs. +// +// Handling of anonymous struct fields is new in Go 1.1. +// Prior to Go 1.1, anonymous struct fields were ignored. To force ignoring of +// an anonymous struct field in both current and earlier versions, give the field +// a JSON tag of "-". +// +// Map values encode as JSON objects. The map's key type must either be a +// string, an integer type, or implement encoding.TextMarshaler. The map keys +// are sorted and used as JSON object keys by applying the following rules, +// subject to the UTF-8 coercion described for string values above: +// - string keys are used directly +// - encoding.TextMarshalers are marshaled +// - integer keys are converted to strings +// +// Pointer values encode as the value pointed to. +// A nil pointer encodes as the null JSON value. +// +// Interface values encode as the value contained in the interface. +// A nil interface value encodes as the null JSON value. +// +// Channel, complex, and function values cannot be encoded in JSON. +// Attempting to encode such a value causes Marshal to return +// an UnsupportedTypeError. +// +// JSON cannot represent cyclic data structures and Marshal does not +// handle them. Passing cyclic structures to Marshal will result in +// an infinite recursion. +// +func Marshal(v interface{}) ([]byte, error) { + return MarshalWithOption(v) +} + +// MarshalNoEscape returns the JSON encoding of v and doesn't escape v. 
+func MarshalNoEscape(v interface{}) ([]byte, error) { + return marshalNoEscape(v) +} + +// MarshalContext returns the JSON encoding of v with context.Context and EncodeOption. +func MarshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) { + return marshalContext(ctx, v, optFuncs...) +} + +// MarshalWithOption returns the JSON encoding of v with EncodeOption. +func MarshalWithOption(v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) { + return marshal(v, optFuncs...) +} + +// MarshalIndent is like Marshal but applies Indent to format the output. +// Each JSON element in the output will begin on a new line beginning with prefix +// followed by one or more copies of indent according to the indentation nesting. +func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + return MarshalIndentWithOption(v, prefix, indent) +} + +// MarshalIndentWithOption is like Marshal but applies Indent to format the output with EncodeOption. +func MarshalIndentWithOption(v interface{}, prefix, indent string, optFuncs ...EncodeOptionFunc) ([]byte, error) { + return marshalIndent(v, prefix, indent, optFuncs...) +} + +// Unmarshal parses the JSON-encoded data and stores the result +// in the value pointed to by v. If v is nil or not a pointer, +// Unmarshal returns an InvalidUnmarshalError. +// +// Unmarshal uses the inverse of the encodings that +// Marshal uses, allocating maps, slices, and pointers as necessary, +// with the following additional rules: +// +// To unmarshal JSON into a pointer, Unmarshal first handles the case of +// the JSON being the JSON literal null. In that case, Unmarshal sets +// the pointer to nil. Otherwise, Unmarshal unmarshals the JSON into +// the value pointed at by the pointer. If the pointer is nil, Unmarshal +// allocates a new value for it to point to. +// +// To unmarshal JSON into a value implementing the Unmarshaler interface, +// Unmarshal calls that value's UnmarshalJSON method, including +// when the input is a JSON null. +// Otherwise, if the value implements encoding.TextUnmarshaler +// and the input is a JSON quoted string, Unmarshal calls that value's +// UnmarshalText method with the unquoted form of the string. +// +// To unmarshal JSON into a struct, Unmarshal matches incoming object +// keys to the keys used by Marshal (either the struct field name or its tag), +// preferring an exact match but also accepting a case-insensitive match. By +// default, object keys which don't have a corresponding struct field are +// ignored (see Decoder.DisallowUnknownFields for an alternative). +// +// To unmarshal JSON into an interface value, +// Unmarshal stores one of these in the interface value: +// +// bool, for JSON booleans +// float64, for JSON numbers +// string, for JSON strings +// []interface{}, for JSON arrays +// map[string]interface{}, for JSON objects +// nil for JSON null +// +// To unmarshal a JSON array into a slice, Unmarshal resets the slice length +// to zero and then appends each element to the slice. +// As a special case, to unmarshal an empty JSON array into a slice, +// Unmarshal replaces the slice with a new empty slice. +// +// To unmarshal a JSON array into a Go array, Unmarshal decodes +// JSON array elements into corresponding Go array elements. +// If the Go array is smaller than the JSON array, +// the additional JSON array elements are discarded. +// If the JSON array is smaller than the Go array, +// the additional Go array elements are set to zero values. 
+// +// To unmarshal a JSON object into a map, Unmarshal first establishes a map to +// use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal +// reuses the existing map, keeping existing entries. Unmarshal then stores +// key-value pairs from the JSON object into the map. The map's key type must +// either be any string type, an integer, implement json.Unmarshaler, or +// implement encoding.TextUnmarshaler. +// +// If a JSON value is not appropriate for a given target type, +// or if a JSON number overflows the target type, Unmarshal +// skips that field and completes the unmarshaling as best it can. +// If no more serious errors are encountered, Unmarshal returns +// an UnmarshalTypeError describing the earliest such error. In any +// case, it's not guaranteed that all the remaining fields following +// the problematic one will be unmarshaled into the target object. +// +// The JSON null value unmarshals into an interface, map, pointer, or slice +// by setting that Go value to nil. Because null is often used in JSON to mean +// ``not present,'' unmarshaling a JSON null into any other Go type has no effect +// on the value and produces no error. +// +// When unmarshaling quoted strings, invalid UTF-8 or +// invalid UTF-16 surrogate pairs are not treated as an error. +// Instead, they are replaced by the Unicode replacement +// character U+FFFD. +// +func Unmarshal(data []byte, v interface{}) error { + return unmarshal(data, v) +} + +// UnmarshalContext parses the JSON-encoded data and stores the result +// in the value pointed to by v. If you implement the UnmarshalerContext interface, +// call it with ctx as an argument. +func UnmarshalContext(ctx context.Context, data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error { + return unmarshalContext(ctx, data, v) +} + +func UnmarshalWithOption(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error { + return unmarshal(data, v, optFuncs...) +} + +func UnmarshalNoEscape(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error { + return unmarshalNoEscape(data, v, optFuncs...) +} + +// A Token holds a value of one of these types: +// +// Delim, for the four JSON delimiters [ ] { } +// bool, for JSON booleans +// float64, for JSON numbers +// Number, for JSON numbers +// string, for JSON string literals +// nil, for JSON null +// +type Token = json.Token + +// A Number represents a JSON number literal. +type Number = json.Number + +// RawMessage is a raw encoded JSON value. +// It implements Marshaler and Unmarshaler and can +// be used to delay JSON decoding or precompute a JSON encoding. +type RawMessage = json.RawMessage + +// A Delim is a JSON array or object delimiter, one of [ ] { or }. +type Delim = json.Delim + +// Compact appends to dst the JSON-encoded src with +// insignificant space characters elided. +func Compact(dst *bytes.Buffer, src []byte) error { + return encoder.Compact(dst, src, false) +} + +// Indent appends to dst an indented form of the JSON-encoded src. +// Each element in a JSON object or array begins on a new, +// indented line beginning with prefix followed by one or more +// copies of indent according to the indentation nesting. +// The data appended to dst does not begin with the prefix nor +// any indentation, to make it easier to embed inside other formatted JSON data. +// Although leading space characters (space, tab, carriage return, newline) +// at the beginning of src are dropped, trailing space characters +// at the end of src are preserved and copied to dst. 
+// For example, if src has no trailing spaces, neither will dst; +// if src ends in a trailing newline, so will dst. +func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error { + return encoder.Indent(dst, src, prefix, indent) +} + +// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 +// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029 +// so that the JSON will be safe to embed inside HTML