From 0a84304da10690a17346df54bd0a3721384d4c76 Mon Sep 17 00:00:00 2001 From: dsinghvi Date: Thu, 6 Feb 2025 16:02:58 +0000 Subject: [PATCH] chore(python): update pydantic seed --- .../accept-header/.github/workflows/ci.yml | 61 ++++ seed/pydantic/accept-header/.gitignore | 5 + .../accept-header/.mock/definition/api.yml | 4 + .../.mock/definition/service.yml | 16 ++ .../accept-header/.mock/fern.config.json | 1 + .../accept-header/.mock/generators.yml | 1 + seed/pydantic/accept-header/README.md | 0 seed/pydantic/accept-header/pyproject.toml | 61 ++++ .../accept-header/snippet-templates.json | 0 seed/pydantic/accept-header/snippet.json | 0 .../accept-header/src/seed/accept/__init__.py | 2 + .../src/seed/accept/core/__init__.py | 25 ++ .../src/seed/accept/core/datetime_utils.py | 28 ++ .../seed/accept/core/pydantic_utilities.py | 265 +++++++++++++++++ .../src/seed/accept/core/serialization.py | 272 ++++++++++++++++++ .../accept-header/src/seed/accept/py.typed | 0 .../accept-header/tests/custom/test_client.py | 7 + 17 files changed, 748 insertions(+) create mode 100644 seed/pydantic/accept-header/.github/workflows/ci.yml create mode 100644 seed/pydantic/accept-header/.gitignore create mode 100644 seed/pydantic/accept-header/.mock/definition/api.yml create mode 100644 seed/pydantic/accept-header/.mock/definition/service.yml create mode 100644 seed/pydantic/accept-header/.mock/fern.config.json create mode 100644 seed/pydantic/accept-header/.mock/generators.yml create mode 100644 seed/pydantic/accept-header/README.md create mode 100644 seed/pydantic/accept-header/pyproject.toml create mode 100644 seed/pydantic/accept-header/snippet-templates.json create mode 100644 seed/pydantic/accept-header/snippet.json create mode 100644 seed/pydantic/accept-header/src/seed/accept/__init__.py create mode 100644 seed/pydantic/accept-header/src/seed/accept/core/__init__.py create mode 100644 seed/pydantic/accept-header/src/seed/accept/core/datetime_utils.py create mode 100644 seed/pydantic/accept-header/src/seed/accept/core/pydantic_utilities.py create mode 100644 seed/pydantic/accept-header/src/seed/accept/core/serialization.py create mode 100644 seed/pydantic/accept-header/src/seed/accept/py.typed create mode 100644 seed/pydantic/accept-header/tests/custom/test_client.py diff --git a/seed/pydantic/accept-header/.github/workflows/ci.yml b/seed/pydantic/accept-header/.github/workflows/ci.yml new file mode 100644 index 00000000000..b204fa604e2 --- /dev/null +++ b/seed/pydantic/accept-header/.github/workflows/ci.yml @@ -0,0 +1,61 @@ +name: ci + +on: [push] +jobs: + compile: + runs-on: ubuntu-20.04 + steps: + - name: Checkout repo + uses: actions/checkout@v3 + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: 3.8 + - name: Bootstrap poetry + run: | + curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1 + - name: Install dependencies + run: poetry install + - name: Compile + run: poetry run mypy . + test: + runs-on: ubuntu-20.04 + steps: + - name: Checkout repo + uses: actions/checkout@v3 + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: 3.8 + - name: Bootstrap poetry + run: | + curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1 + - name: Install dependencies + run: poetry install + + - name: Test + run: poetry run pytest -rP . 
+ + publish: + needs: [compile, test] + if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') + runs-on: ubuntu-20.04 + steps: + - name: Checkout repo + uses: actions/checkout@v3 + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: 3.8 + - name: Bootstrap poetry + run: | + curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1 + - name: Install dependencies + run: poetry install + - name: Publish to pypi + run: | + poetry config repositories.remote + poetry --no-interaction -v publish --build --repository remote --username "$" --password "$" + env: + : ${{ secrets. }} + : ${{ secrets. }} diff --git a/seed/pydantic/accept-header/.gitignore b/seed/pydantic/accept-header/.gitignore new file mode 100644 index 00000000000..0da665feeef --- /dev/null +++ b/seed/pydantic/accept-header/.gitignore @@ -0,0 +1,5 @@ +dist/ +.mypy_cache/ +__pycache__/ +poetry.toml +.ruff_cache/ diff --git a/seed/pydantic/accept-header/.mock/definition/api.yml b/seed/pydantic/accept-header/.mock/definition/api.yml new file mode 100644 index 00000000000..4c370c1686d --- /dev/null +++ b/seed/pydantic/accept-header/.mock/definition/api.yml @@ -0,0 +1,4 @@ +name: accept +auth: bearer +error-discrimination: + strategy: status-code diff --git a/seed/pydantic/accept-header/.mock/definition/service.yml b/seed/pydantic/accept-header/.mock/definition/service.yml new file mode 100644 index 00000000000..1ea148bbf00 --- /dev/null +++ b/seed/pydantic/accept-header/.mock/definition/service.yml @@ -0,0 +1,16 @@ +errors: + NotFoundError: + docs: Admin not found + status-code: 404 + type: unknown + +service: + auth: true + base-path: /container + endpoints: + endpoint: + errors: + - NotFoundError + method: DELETE + path: / + diff --git a/seed/pydantic/accept-header/.mock/fern.config.json b/seed/pydantic/accept-header/.mock/fern.config.json new file mode 100644 index 00000000000..4c8e54ac313 --- /dev/null +++ b/seed/pydantic/accept-header/.mock/fern.config.json @@ -0,0 +1 @@ +{"organization": "fern-test", "version": "*"} \ No newline at end of file diff --git a/seed/pydantic/accept-header/.mock/generators.yml b/seed/pydantic/accept-header/.mock/generators.yml new file mode 100644 index 00000000000..0967ef424bc --- /dev/null +++ b/seed/pydantic/accept-header/.mock/generators.yml @@ -0,0 +1 @@ +{} diff --git a/seed/pydantic/accept-header/README.md b/seed/pydantic/accept-header/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/seed/pydantic/accept-header/pyproject.toml b/seed/pydantic/accept-header/pyproject.toml new file mode 100644 index 00000000000..88ffc220447 --- /dev/null +++ b/seed/pydantic/accept-header/pyproject.toml @@ -0,0 +1,61 @@ +[project] +name = "fern_accept-header" + +[tool.poetry] +name = "fern_accept-header" +version = "0.0.1" +description = "" +readme = "README.md" +authors = [] +keywords = [] + +classifiers = [ + "Intended Audience :: Developers", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Operating System :: POSIX", + "Operating System :: MacOS", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows", + "Topic :: Software Development :: Libraries :: Python Modules", + "Typing :: Typed" +] +packages = [ + { include 
= "seed/accept", from = "src"} +] + +[project.urls] +Repository = 'https://github.com/accept-header/fern' + +[tool.poetry.dependencies] +python = "^3.8" +pydantic-core = "^2.18.2" + +[tool.poetry.dev-dependencies] +mypy = "1.0.1" +pytest = "^7.4.0" +pytest-asyncio = "^0.23.5" +python-dateutil = "^2.9.0" +types-python-dateutil = "^2.9.0.20240316" +ruff = "^0.5.6" + +[tool.pytest.ini_options] +testpaths = [ "tests" ] +asyncio_mode = "auto" + +[tool.mypy] +plugins = ["pydantic.mypy"] + +[tool.ruff] +line-length = 120 + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/seed/pydantic/accept-header/snippet-templates.json b/seed/pydantic/accept-header/snippet-templates.json new file mode 100644 index 00000000000..e69de29bb2d diff --git a/seed/pydantic/accept-header/snippet.json b/seed/pydantic/accept-header/snippet.json new file mode 100644 index 00000000000..e69de29bb2d diff --git a/seed/pydantic/accept-header/src/seed/accept/__init__.py b/seed/pydantic/accept-header/src/seed/accept/__init__.py new file mode 100644 index 00000000000..f3ea2659bb1 --- /dev/null +++ b/seed/pydantic/accept-header/src/seed/accept/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/seed/pydantic/accept-header/src/seed/accept/core/__init__.py b/seed/pydantic/accept-header/src/seed/accept/core/__init__.py new file mode 100644 index 00000000000..9c7cd65aa25 --- /dev/null +++ b/seed/pydantic/accept-header/src/seed/accept/core/__init__.py @@ -0,0 +1,25 @@ +# This file was auto-generated by Fern from our API Definition. + +from .datetime_utils import serialize_datetime +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + UniversalBaseModel, + UniversalRootModel, + parse_obj_as, + universal_field_validator, + universal_root_validator, + update_forward_refs, +) +from .serialization import FieldMetadata + +__all__ = [ + "FieldMetadata", + "IS_PYDANTIC_V2", + "UniversalBaseModel", + "UniversalRootModel", + "parse_obj_as", + "serialize_datetime", + "universal_field_validator", + "universal_root_validator", + "update_forward_refs", +] diff --git a/seed/pydantic/accept-header/src/seed/accept/core/datetime_utils.py b/seed/pydantic/accept-header/src/seed/accept/core/datetime_utils.py new file mode 100644 index 00000000000..7c9864a944c --- /dev/null +++ b/seed/pydantic/accept-header/src/seed/accept/core/datetime_utils.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt + + +def serialize_datetime(v: dt.datetime) -> str: + """ + Serialize a datetime including timezone info. + + Uses the timezone info provided if present, otherwise uses the current runtime's timezone info. + + UTC datetimes end in "Z" while all other timezones are represented as offset from UTC, e.g. +05:00. 
+ """ + + def _serialize_zoned_datetime(v: dt.datetime) -> str: + if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None): + # UTC is a special case where we use "Z" at the end instead of "+00:00" + return v.isoformat().replace("+00:00", "Z") + else: + # Delegate to the typical +/- offset format + return v.isoformat() + + if v.tzinfo is not None: + return _serialize_zoned_datetime(v) + else: + local_tz = dt.datetime.now().astimezone().tzinfo + localized_dt = v.replace(tzinfo=local_tz) + return _serialize_zoned_datetime(localized_dt) diff --git a/seed/pydantic/accept-header/src/seed/accept/core/pydantic_utilities.py b/seed/pydantic/accept-header/src/seed/accept/core/pydantic_utilities.py new file mode 100644 index 00000000000..6cda82ecdfb --- /dev/null +++ b/seed/pydantic/accept-header/src/seed/accept/core/pydantic_utilities.py @@ -0,0 +1,265 @@ +# This file was auto-generated by Fern from our API Definition. + +# nopycln: file +import datetime as dt +import typing +from collections import defaultdict + +import typing_extensions + +import pydantic + +from .datetime_utils import serialize_datetime + +IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") + +if IS_PYDANTIC_V2: + # isort will try to reformat the comments on these imports, which breaks mypy + # isort: off + from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_date as parse_date, + ) + from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_datetime as parse_datetime, + ) + from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + ENCODERS_BY_TYPE as encoders_by_type, + ) + from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + get_args as get_args, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + get_origin as get_origin, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_literal_type as is_literal_type, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_union as is_union, + ) + from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 +else: + from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 + from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 + from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 + from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 + from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 + from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 + + # isort: on + + +T = typing.TypeVar("T") +Model = typing.TypeVar("Model", bound=pydantic.BaseModel) + + +def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: + if IS_PYDANTIC_V2: + adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2 + return adapter.validate_python(object_) + else: + return pydantic.parse_obj_as(type_, object_) + + +def to_jsonable_with_fallback( + obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], 
typing.Any] +) -> typing.Any: + if IS_PYDANTIC_V2: + from pydantic_core import to_jsonable_python + + return to_jsonable_python(obj, fallback=fallback_serializer) + else: + return fallback_serializer(obj) + + +class UniversalBaseModel(pydantic.BaseModel): + class Config: + populate_by_name = True + smart_union = True + allow_population_by_field_name = True + json_encoders = {dt.datetime: serialize_datetime} + # Allow fields beginning with `model_` to be used in the model + protected_namespaces = () + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + if IS_PYDANTIC_V2: + return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2 + else: + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + """ + Override the default dict method to `exclude_unset` by default. This function patches + `exclude_unset` to work include fields within non-None default values. + """ + # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2 + # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice. + # + # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models + # that we have less control over, and this is less intrusive than custom serializers for now. + if IS_PYDANTIC_V2: + kwargs_with_defaults_exclude_unset: typing.Any = { + **kwargs, + "by_alias": True, + "exclude_unset": True, + "exclude_none": False, + } + kwargs_with_defaults_exclude_none: typing.Any = { + **kwargs, + "by_alias": True, + "exclude_none": True, + "exclude_unset": False, + } + return deep_union_pydantic_dicts( + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2 + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2 + ) + + else: + _fields_set = self.__fields_set__.copy() + + fields = _get_model_fields(self.__class__) + for name, field in fields.items(): + if name not in _fields_set: + default = _get_field_default(field) + + # If the default values are non-null act like they've been set + # This effectively allows exclude_unset to work like exclude_none where + # the latter passes through intentionally set none values. 
+ if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]): + _fields_set.add(name) + + if default is not None: + self.__fields_set__.add(name) + + kwargs_with_defaults_exclude_unset_include_fields: typing.Any = { + "by_alias": True, + "exclude_unset": True, + "include": _fields_set, + **kwargs, + } + + return super().dict(**kwargs_with_defaults_exclude_unset_include_fields) + + +def _union_list_of_pydantic_dicts( + source: typing.List[typing.Any], destination: typing.List[typing.Any] +) -> typing.List[typing.Any]: + converted_list: typing.List[typing.Any] = [] + for i, item in enumerate(source): + destination_value = destination[i] # type: ignore + if isinstance(item, dict): + converted_list.append(deep_union_pydantic_dicts(item, destination_value)) + elif isinstance(item, list): + converted_list.append(_union_list_of_pydantic_dicts(item, destination_value)) + else: + converted_list.append(item) + return converted_list + + +def deep_union_pydantic_dicts( + source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any] +) -> typing.Dict[str, typing.Any]: + for key, value in source.items(): + node = destination.setdefault(key, {}) + if isinstance(value, dict): + deep_union_pydantic_dicts(value, node) + # Note: we do not do this same processing for sets given we do not have sets of models + # and given the sets are unordered, the processing of the set and matching objects would + # be non-trivial. + elif isinstance(value, list): + destination[key] = _union_list_of_pydantic_dicts(value, node) + else: + destination[key] = value + + return destination + + +if IS_PYDANTIC_V2: + + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 + pass + + UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore +else: + UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore + + +def encode_by_type(o: typing.Any) -> typing.Any: + encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = ( + defaultdict(tuple) + ) + for type_, encoder in encoders_by_type.items(): + encoders_by_class_tuples[encoder] += (type_,) + + if type(o) in encoders_by_type: + return encoders_by_type[type(o)](o) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(o, classes_tuple): + return encoder(o) + + +def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: + if IS_PYDANTIC_V2: + model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2 + else: + model.update_forward_refs(**localns) + + +# Mirrors Pydantic's internal typing +AnyCallable = typing.Callable[..., typing.Any] + + +def universal_root_validator( + pre: bool = False, +) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 + + return decorator + + +def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1 + + return decorator + + +PydanticField = 
typing.Union[ModelField, pydantic.fields.FieldInfo] + + +def _get_model_fields( + model: typing.Type["Model"], +) -> typing.Mapping[str, PydanticField]: + if IS_PYDANTIC_V2: + return model.model_fields # type: ignore # Pydantic v2 + else: + return model.__fields__ # type: ignore # Pydantic v1 + + +def _get_field_default(field: PydanticField) -> typing.Any: + try: + value = field.get_default() # type: ignore # Pydantic < v1.10.15 + except: + value = field.default + if IS_PYDANTIC_V2: + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + return value diff --git a/seed/pydantic/accept-header/src/seed/accept/core/serialization.py b/seed/pydantic/accept-header/src/seed/accept/core/serialization.py new file mode 100644 index 00000000000..cb5dcbf93a9 --- /dev/null +++ b/seed/pydantic/accept-header/src/seed/accept/core/serialization.py @@ -0,0 +1,272 @@ +# This file was auto-generated by Fern from our API Definition. + +import collections +import inspect +import typing + +import typing_extensions + +import pydantic + + +class FieldMetadata: + """ + Metadata class used to annotate fields to provide additional information. + + Example: + class MyDict(TypedDict): + field: typing.Annotated[str, FieldMetadata(alias="field_name")] + + Will serialize: `{"field": "value"}` + To: `{"field_name": "value"}` + """ + + alias: str + + def __init__(self, *, alias: str) -> None: + self.alias = alias + + +def convert_and_respect_annotation_metadata( + *, + object_: typing.Any, + annotation: typing.Any, + inner_type: typing.Optional[typing.Any] = None, + direction: typing.Literal["read", "write"], +) -> typing.Any: + """ + Respect the metadata annotations on a field, such as aliasing. This function effectively + manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for + TypedDicts, which cannot support aliasing out of the box, and can be extended for additional + utilities, such as defaults. + + Parameters + ---------- + object_ : typing.Any + + annotation : type + The type we're looking to apply typing annotations from + + inner_type : typing.Optional[type] + + Returns + ------- + typing.Any + """ + + if object_ is None: + return None + if inner_type is None: + inner_type = annotation + + clean_type = _remove_annotations(inner_type) + # Pydantic models + if ( + inspect.isclass(clean_type) + and issubclass(clean_type, pydantic.BaseModel) + and isinstance(object_, typing.Mapping) + ): + return _convert_mapping(object_, clean_type, direction) + # TypedDicts + if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping): + return _convert_mapping(object_, clean_type, direction) + + if ( + typing_extensions.get_origin(clean_type) == typing.Dict + or typing_extensions.get_origin(clean_type) == dict + or clean_type == typing.Dict + ) and isinstance(object_, typing.Dict): + key_type = typing_extensions.get_args(clean_type)[0] + value_type = typing_extensions.get_args(clean_type)[1] + + return { + key: convert_and_respect_annotation_metadata( + object_=value, + annotation=annotation, + inner_type=value_type, + direction=direction, + ) + for key, value in object_.items() + } + + # If you're iterating on a string, do not bother to coerce it to a sequence. 
+ if not isinstance(object_, str): + if ( + typing_extensions.get_origin(clean_type) == typing.Set + or typing_extensions.get_origin(clean_type) == set + or clean_type == typing.Set + ) and isinstance(object_, typing.Set): + inner_type = typing_extensions.get_args(clean_type)[0] + return { + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + } + elif ( + ( + typing_extensions.get_origin(clean_type) == typing.List + or typing_extensions.get_origin(clean_type) == list + or clean_type == typing.List + ) + and isinstance(object_, typing.List) + ) or ( + ( + typing_extensions.get_origin(clean_type) == typing.Sequence + or typing_extensions.get_origin(clean_type) == collections.abc.Sequence + or clean_type == typing.Sequence + ) + and isinstance(object_, typing.Sequence) + ): + inner_type = typing_extensions.get_args(clean_type)[0] + return [ + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + ] + + if typing_extensions.get_origin(clean_type) == typing.Union: + # We should be able to ~relatively~ safely try to convert keys against all + # member types in the union, the edge case here is if one member aliases a field + # of the same name to a different name from another member + # Or if another member aliases a field of the same name that another member does not. + for member in typing_extensions.get_args(clean_type): + object_ = convert_and_respect_annotation_metadata( + object_=object_, + annotation=annotation, + inner_type=member, + direction=direction, + ) + return object_ + + annotated_type = _get_annotation(annotation) + if annotated_type is None: + return object_ + + # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.) + # Then we can safely call it on the recursive conversion. 
+ return object_ + + +def _convert_mapping( + object_: typing.Mapping[str, object], + expected_type: typing.Any, + direction: typing.Literal["read", "write"], +) -> typing.Mapping[str, object]: + converted_object: typing.Dict[str, object] = {} + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + aliases_to_field_names = _get_alias_to_field_name(annotations) + for key, value in object_.items(): + if direction == "read" and key in aliases_to_field_names: + dealiased_key = aliases_to_field_names.get(key) + if dealiased_key is not None: + type_ = annotations.get(dealiased_key) + else: + type_ = annotations.get(key) + # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map + # + # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias + # then we can just pass the value through as is + if type_ is None: + converted_object[key] = value + elif direction == "read" and key not in aliases_to_field_names: + converted_object[key] = convert_and_respect_annotation_metadata( + object_=value, annotation=type_, direction=direction + ) + else: + converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = ( + convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction) + ) + return converted_object + + +def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return None + + if maybe_annotated_type == typing_extensions.NotRequired: + type_ = typing_extensions.get_args(type_)[0] + maybe_annotated_type = typing_extensions.get_origin(type_) + + if maybe_annotated_type == typing_extensions.Annotated: + return type_ + + return None + + +def _remove_annotations(type_: typing.Any) -> typing.Any: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return type_ + + if maybe_annotated_type == typing_extensions.NotRequired: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + if maybe_annotated_type == typing_extensions.Annotated: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + return type_ + + +def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_alias_to_field_name(annotations) + + +def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_field_to_alias_name(annotations) + + +def _get_alias_to_field_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[maybe_alias] = field + return aliases + + +def _get_field_to_alias_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[field] = maybe_alias + return aliases + + +def _get_alias_from_type(type_: typing.Any) -> typing.Optional[str]: + maybe_annotated_type = _get_annotation(type_) + + if maybe_annotated_type is not None: + # The actual annotations are 1 onward, the first is the annotated type + annotations = 
typing_extensions.get_args(maybe_annotated_type)[1:] + + for annotation in annotations: + if isinstance(annotation, FieldMetadata) and annotation.alias is not None: + return annotation.alias + return None + + +def _alias_key( + key: str, + type_: typing.Any, + direction: typing.Literal["read", "write"], + aliases_to_field_names: typing.Dict[str, str], +) -> str: + if direction == "read": + return aliases_to_field_names.get(key, key) + return _get_alias_from_type(type_=type_) or key diff --git a/seed/pydantic/accept-header/src/seed/accept/py.typed b/seed/pydantic/accept-header/src/seed/accept/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/seed/pydantic/accept-header/tests/custom/test_client.py b/seed/pydantic/accept-header/tests/custom/test_client.py new file mode 100644 index 00000000000..73f811f5ede --- /dev/null +++ b/seed/pydantic/accept-header/tests/custom/test_client.py @@ -0,0 +1,7 @@ +import pytest + + +# Get started with writing tests with pytest at https://docs.pytest.org +@pytest.mark.skip(reason="Unimplemented") +def test_client() -> None: + assert True == True
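
The regenerated `core` helpers in this seed are easier to review with a concrete caller in mind. The sketch below is a minimal, hypothetical usage of the utilities added by this patch: `User`, `UserDict`, and the field/alias names are invented for illustration and appear nowhere in this diff, while the import paths simply mirror the `src/seed/accept/core` layout above.

    import typing

    import typing_extensions

    from seed.accept.core import FieldMetadata, UniversalBaseModel, parse_obj_as
    from seed.accept.core.serialization import convert_and_respect_annotation_metadata


    # Hypothetical model: UniversalBaseModel behaves the same under Pydantic v1 and v2.
    class User(UniversalBaseModel):
        name: str
        created_at: typing.Optional[str] = None


    # Hypothetical TypedDict: FieldMetadata carries the wire alias that a plain
    # TypedDict cannot express on its own.
    class UserDict(typing_extensions.TypedDict):
        name: typing_extensions.Annotated[str, FieldMetadata(alias="user_name")]


    user = parse_obj_as(User, {"name": "alice"})
    print(user.dict())  # exclude_unset by default -> {'name': 'alice'}

    # The "write" direction renames keys to their aliases before they go on the wire.
    payload = convert_and_respect_annotation_metadata(
        object_={"name": "alice"}, annotation=UserDict, direction="write"
    )
    print(payload)  # {'user_name': 'alice'}

Because `UniversalBaseModel.dict()` excludes unset fields by default, `created_at` is omitted from the first print; the second call exercises the TypedDict aliasing path that `_convert_mapping` resolves through `FieldMetadata`.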