diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bee95422f952..f79245f36718 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -57,13 +57,12 @@ jobs:
         run: pip install -r requirements-types.txt -r reqs/requirements-dev-contrib.txt
       - run: invoke lint --no-fmt
       - run: invoke fmt --check
-      - name: Marker-coverage-check
-        run: |
-          invoke marker-coverage
       - name: Type-check
         run: |
           invoke type-check --ci --pretty
           invoke type-check --ci --pretty --check-stub-sources
+      - name: Marker-coverage-check
+        run: invoke marker-coverage
 
   docs-changes:
     # check whether docs were changed
@@ -405,6 +404,24 @@ jobs:
       - name: Run the tests
         run: invoke ci-tests -m unit --xdist --slowest=10 --timeout=2.0
 
+  pydantic-v1:
+    needs: [unit-tests, static-analysis]
+    if: github.event.pull_request.draft == false
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.8"
+          cache: "pip"
+          cache-dependency-path: reqs/requirements-dev-test.txt
+      - name: Install dependencies
+        run: pip install . -c ci/constraints-test/pydantic-v1-install.txt -r reqs/requirements-dev-test.txt
+      - name: Run the tests
+        run: invoke ci-tests -m unit --xdist --slowest=10 --timeout=2.0
+
   airflow220-min-versions:
     needs: [unit-tests, static-analysis]
     runs-on: ubuntu-latest
@@ -481,6 +498,7 @@ jobs:
       py39-min-versions,
       py310-min-versions,
       py311-min-versions,
+      pydantic-v1,
       airflow220-min-versions,
       import_gx,
       usage_stats_integration
@@ -528,6 +546,7 @@ jobs:
       py39-min-versions,
       py310-min-versions,
       py311-min-versions,
+      pydantic-v1,
       airflow220-min-versions,
       import_gx,
       usage_stats_integration,
diff --git a/ci/constraints-test/pydantic-v1-install.txt b/ci/constraints-test/pydantic-v1-install.txt
new file mode 100644
index 000000000000..82520e73268e
--- /dev/null
+++ b/ci/constraints-test/pydantic-v1-install.txt
@@ -0,0 +1,3 @@
+# this is higher than our min version but is the earliest version that actually works
+# TODO: update our min version or fix gx for our current min version
+pydantic==1.10.8
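The workflow hunks above give the pydantic v1 code path its own CI leg: the new `pydantic-v1` job reinstalls GX with pydantic pinned to 1.10.8 via the constraints file, re-runs the unit suite, and is added to both `needs:` fan-in lists so a v1 regression blocks the merge gates. A hedged sketch of a unit test this job would exercise differently from the main matrix — the test itself is illustrative and not part of this PR; only the `great_expectations.compatibility.pydantic` module (introduced later in this diff) is real:

```python
# test_pydantic_compat_sketch.py -- illustrative only, not a test from this PR.
# Assumes pydantic and packaging are installed.
import pydantic
from packaging.version import Version

from great_expectations.compatibility import pydantic as compat_pydantic


def test_compat_layer_exposes_v1_api() -> None:
    # The shim guarantees the pydantic v1 surface regardless of which
    # major version the pydantic-v1 CI job (or the main matrix) installed.
    assert hasattr(compat_pydantic, "BaseModel")
    assert hasattr(compat_pydantic, "ValidationError")

    if Version(pydantic.VERSION) >= Version("2.0.0"):
        # under a v2 install the shim must hand back the vendored v1 classes
        from pydantic.v1 import BaseModel as V1BaseModel

        assert compat_pydantic.BaseModel is V1BaseModel
```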
diff --git a/contrib/cli/requirements.txt b/contrib/cli/requirements.txt
index a56bff7a02c5..fe46e6791e1b 100644
--- a/contrib/cli/requirements.txt
+++ b/contrib/cli/requirements.txt
@@ -2,7 +2,7 @@ black[jupyter]==23.3.0 # Linting / code style
 Click>=7.1.2 # CLI tooling
 cookiecutter==2.1.1 # Project templating
 mypy==1.5.1 # Type checker
-pydantic>=1.0,<2.0 # Needed for mypy plugin
+pydantic>=1.0 # Needed for mypy plugin
 pytest>=5.3.5 # Test framework
 ruff==0.0.284 # Linting / code style
 twine==3.7.1 # Packaging
diff --git a/great_expectations/agent/actions/agent_action.py b/great_expectations/agent/actions/agent_action.py
index fd9d78855a4b..0ad28530ce5c 100644
--- a/great_expectations/agent/actions/agent_action.py
+++ b/great_expectations/agent/actions/agent_action.py
@@ -1,9 +1,8 @@
 from abc import abstractmethod
 from typing import Generic, Sequence, TypeVar
 
-from pydantic import BaseModel
-
 from great_expectations.agent.models import CreatedResource, Event
+from great_expectations.compatibility.pydantic import BaseModel
 from great_expectations.data_context import CloudDataContext
 
 
diff --git a/great_expectations/agent/actions/draft_datasource_config_action.py b/great_expectations/agent/actions/draft_datasource_config_action.py
index 16d0d3d9729d..75c1a7edb800 100644
--- a/great_expectations/agent/actions/draft_datasource_config_action.py
+++ b/great_expectations/agent/actions/draft_datasource_config_action.py
@@ -1,10 +1,9 @@
 from uuid import UUID
 
-import pydantic
-
 from great_expectations.agent.actions import ActionResult, AgentAction
 from great_expectations.agent.config import GxAgentEnvVars
 from great_expectations.agent.models import DraftDatasourceConfigEvent
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.http import create_session
@@ -33,7 +32,7 @@ def run(self, event: DraftDatasourceConfigEvent, id: str) -> ActionResult:
 
     def get_draft_config(self, config_id: UUID) -> dict:
         try:
-            config = GxAgentEnvVars()
+            config = GxAgentEnvVars()  # type: ignore[call-arg] # args pulled from env vars
         except pydantic.ValidationError as validation_err:
             raise RuntimeError(
                 f"Missing or badly formed environment variable\n{validation_err.errors()}"
diff --git a/great_expectations/agent/actions/list_table_names.py b/great_expectations/agent/actions/list_table_names.py
index a52c05119209..d00798e38a75 100644
--- a/great_expectations/agent/actions/list_table_names.py
+++ b/great_expectations/agent/actions/list_table_names.py
@@ -1,7 +1,5 @@
 from typing import TYPE_CHECKING, List
 
-import pydantic
-
 from great_expectations.agent.actions.agent_action import (
     ActionResult,
     AgentAction,
@@ -10,6 +8,7 @@
 from great_expectations.agent.models import (
     ListTableNamesEvent,
 )
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.sqlalchemy import inspect
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.http import create_session
@@ -47,7 +46,7 @@ def _add_or_update_table_names_list(
         self, datasource_id: str, table_names: List[str]
     ) -> None:
         try:
-            cloud_config = GxAgentEnvVars()
+            cloud_config = GxAgentEnvVars()  # type: ignore[call-arg] # args pulled from env vars
         except pydantic.ValidationError as validation_err:
             raise RuntimeError(
                 f"Missing or badly formed environment variable\n{validation_err.errors()}"
diff --git a/great_expectations/agent/agent.py b/great_expectations/agent/agent.py
index 813fee17971b..71d2b385b1dc 100644
--- a/great_expectations/agent/agent.py
+++ b/great_expectations/agent/agent.py
@@ -6,9 +6,6 @@
 from functools import partial
 from typing import TYPE_CHECKING, Dict, Optional
 
-import pydantic
-from pydantic import AmqpDsn, AnyUrl
-
 from great_expectations import get_context
 from great_expectations.agent.actions.agent_action import ActionResult
 from great_expectations.agent.config import GxAgentEnvVars
@@ -32,6 +29,8 @@
     JobStatus,
     UnknownEvent,
 )
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import AmqpDsn, AnyUrl
 from great_expectations.core.http import create_session
 from great_expectations.data_context.cloud_constants import CLOUD_DEFAULT_BASE_URL
@@ -222,7 +221,7 @@ def _get_config(cls) -> GXAgentConfig:
 
         # ensure we have all required env variables, and provide a useful error if not
         try:
-            env_vars = GxAgentEnvVars()
+            env_vars = GxAgentEnvVars()  # type: ignore[call-arg] # args pulled from env vars
         except pydantic.ValidationError as validation_err:
             raise GXAgentError(
                 f"Missing or badly formed environment variable\n{validation_err.errors()}"
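All three agent-side hunks above add the same `# type: ignore[call-arg]` to `GxAgentEnvVars()` calls: the class (defined in `great_expectations/agent/config.py`, next in this diff) reads its fields from environment variables at runtime, so the no-argument call is valid even though mypy sees required constructor arguments. A minimal sketch of that settings pattern, assuming the compatibility layer's star import re-exports `BaseSettings` (part of the pydantic v1 API surface); the field names here are illustrative, not GxAgentEnvVars's real ones:

```python
# env_settings_sketch.py -- illustrative; GxAgentEnvVars's real fields live in
# great_expectations/agent/config.py and are not reproduced here.
import os

from great_expectations.compatibility.pydantic import BaseSettings, ValidationError


class AgentEnvVarsSketch(BaseSettings):
    # pydantic fills each field from a same-named environment variable
    gx_cloud_organization_id: str
    gx_cloud_access_token: str


if __name__ == "__main__":
    os.environ["GX_CLOUD_ORGANIZATION_ID"] = "my-org"
    os.environ["GX_CLOUD_ACCESS_TOKEN"] = "my-token"
    try:
        # mypy reports `call-arg` here (it sees two required arguments),
        # which is exactly what the diff's ignore comments silence
        config = AgentEnvVarsSketch()  # type: ignore[call-arg]
        print(config.gx_cloud_organization_id)
    except ValidationError as err:
        print(f"Missing or badly formed environment variable\n{err.errors()}")
```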
diff --git a/great_expectations/agent/config.py b/great_expectations/agent/config.py
index f7f902c8be6a..c8fe0ad0229d 100644
--- a/great_expectations/agent/config.py
+++ b/great_expectations/agent/config.py
@@ -1,6 +1,5 @@
-import pydantic
-from pydantic import AnyUrl
-
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import AnyUrl
 from great_expectations.data_context.cloud_constants import CLOUD_DEFAULT_BASE_URL
 
 
diff --git a/great_expectations/agent/message_service/subscriber.py b/great_expectations/agent/message_service/subscriber.py
index 044c6f8e6975..481f53fa1e6d 100644
--- a/great_expectations/agent/message_service/subscriber.py
+++ b/great_expectations/agent/message_service/subscriber.py
@@ -5,7 +5,6 @@
 from json import JSONDecodeError
 from typing import Callable, Coroutine, Union
 
-import pydantic
 from pika.exceptions import AMQPError, ChannelError
 
 from great_expectations.agent.message_service.asyncio_rabbit_mq_client import (
@@ -13,6 +12,7 @@
     OnMessagePayload,
 )
 from great_expectations.agent.models import Event, UnknownEvent
+from great_expectations.compatibility import pydantic
 
 
 @dataclass(frozen=True)
diff --git a/great_expectations/agent/models.py b/great_expectations/agent/models.py
index a977cbd5e1d3..0338d50a430d 100644
--- a/great_expectations/agent/models.py
+++ b/great_expectations/agent/models.py
@@ -2,9 +2,10 @@
 from typing import Literal, Sequence, Union
 from uuid import UUID
 
-from pydantic import BaseModel, Extra, Field
 from typing_extensions import Annotated
 
+from great_expectations.compatibility.pydantic import BaseModel, Extra, Field
+
 
 class AgentBaseModel(BaseModel):
     class Config:
diff --git a/great_expectations/compatibility/pydantic.py b/great_expectations/compatibility/pydantic.py
new file mode 100644
index 000000000000..8a8b0aee8af3
--- /dev/null
+++ b/great_expectations/compatibility/pydantic.py
@@ -0,0 +1,56 @@
+import pydantic
+
+from great_expectations.compatibility.not_imported import (
+    is_version_greater_or_equal,
+)
+
+if is_version_greater_or_equal(version=pydantic.VERSION, compare_version="2.0.0"):
+    # TODO: don't use star imports
+    from pydantic.v1 import *  # noqa: F403
+    from pydantic.v1 import (
+        AnyUrl,
+        UrlError,
+        error_wrappers,
+        errors,
+        fields,
+        generics,
+        json,
+        networks,
+        schema,
+        typing,
+    )
+    from pydantic.v1.generics import GenericModel
+    from pydantic.v1.main import ModelMetaclass
+
+else:
+    # TODO: don't use star imports
+    from pydantic import *  # type: ignore[assignment,no-redef] # noqa: F403
+    from pydantic import (  # type: ignore[no-redef]
+        AnyUrl,
+        UrlError,
+        error_wrappers,
+        errors,
+        fields,
+        generics,
+        json,
+        networks,
+        schema,
+        typing,
+    )
+    from pydantic.generics import GenericModel  # type: ignore[no-redef]
+    from pydantic.main import ModelMetaclass  # type: ignore[no-redef]
+
+__all__ = [
+    "AnyUrl",
+    "error_wrappers",
+    "errors",
+    "fields",
+    "GenericModel",
+    "generics",
+    "json",
+    "ModelMetaclass",
+    "networks",
+    "schema",
+    "typing",
+    "UrlError",
+]
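The new compatibility module above is the core of this change: every internal `pydantic` import is routed through one module that re-exports the v1 API, either from the `pydantic.v1` namespace (when pydantic 2.x is installed, which vendors the entire 1.x API there) or from the top-level package (when 1.x is installed). A minimal standalone sketch of the same version-gate technique, using `packaging.version` in place of GX's internal `is_version_greater_or_equal` helper — the helper's exact signature is an assumption here; it lives in `great_expectations.compatibility.not_imported`:

```python
# compat_pydantic_sketch.py -- minimal version-gated re-export; a sketch of
# the technique, not the actual GX module.
import pydantic
from packaging.version import Version

if Version(pydantic.VERSION) >= Version("2.0.0"):
    # pydantic 2.x ships the whole 1.x API under the `pydantic.v1` namespace
    from pydantic.v1 import BaseModel, Field, ValidationError
else:
    # pydantic 1.x: the v1 API *is* the top-level API
    from pydantic import BaseModel, Field, ValidationError  # type: ignore[no-redef]

__all__ = ["BaseModel", "Field", "ValidationError"]


class Probe(BaseModel):
    """Tiny model confirming the re-exported API behaves identically."""

    name: str = Field(..., min_length=1)


if __name__ == "__main__":
    try:
        Probe(name="")  # violates min_length under either install
    except ValidationError as err:
        print(err.errors()[0]["msg"])
```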
diff --git a/great_expectations/core/util.py b/great_expectations/core/util.py
index a80556df6b1f..ca870d022b4e 100644
--- a/great_expectations/core/util.py
+++ b/great_expectations/core/util.py
@@ -30,11 +30,10 @@
 import dateutil.parser
 import numpy as np
 import pandas as pd
-import pydantic
 from IPython import get_ipython
 
 from great_expectations import exceptions as gx_exceptions
-from great_expectations.compatibility import pyspark, sqlalchemy
+from great_expectations.compatibility import pydantic, pyspark, sqlalchemy
 from great_expectations.compatibility.sqlalchemy import (
     SQLALCHEMY_NOT_IMPORTED,
     LegacyRow,
diff --git a/great_expectations/datasource/data_connector/batch_filter.py b/great_expectations/datasource/data_connector/batch_filter.py
index 4353a9852e90..f15e21bb6678 100644
--- a/great_expectations/datasource/data_connector/batch_filter.py
+++ b/great_expectations/datasource/data_connector/batch_filter.py
@@ -4,9 +4,8 @@
 import logging
 from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Sequence, Union
 
-from pydantic import StrictInt, StrictStr
-
 import great_expectations.exceptions as gx_exceptions
+from great_expectations.compatibility.pydantic import StrictInt, StrictStr
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.id_dict import IDDict
 
diff --git a/great_expectations/datasource/fluent/batch_request.py b/great_expectations/datasource/fluent/batch_request.py
index efb7a3ac4ea0..22198a9988cf 100644
--- a/great_expectations/datasource/fluent/batch_request.py
+++ b/great_expectations/datasource/fluent/batch_request.py
@@ -11,11 +11,14 @@
     Union,
 )
 
-import pydantic
-from pydantic import StrictStr
-from pydantic.json import pydantic_encoder
-from pydantic.schema import default_ref_template
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import Field, StrictStr
+from great_expectations.compatibility.pydantic import json as pydantic_json
+from great_expectations.compatibility.pydantic import (
+    schema as pydantic_schema,
+)
+# default_ref_template
 
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.datasource.data_connector.batch_filter import (
@@ -59,17 +62,17 @@ class BatchRequest(pydantic.BaseModel):
         BatchRequest
     """
 
-    datasource_name: StrictStr = pydantic.Field(
+    datasource_name: StrictStr = Field(
         ...,
         allow_mutation=False,
         description="The name of the Datasource used to connect to the data.",
     )
-    data_asset_name: StrictStr = pydantic.Field(
+    data_asset_name: StrictStr = Field(
         ...,
         allow_mutation=False,
         description="The name of the Data Asset used to connect to the data.",
     )
-    options: BatchRequestOptions = pydantic.Field(
+    options: BatchRequestOptions = Field(
         default_factory=dict,
         allow_mutation=True,
         description=(
@@ -215,7 +218,7 @@ def schema_json(
         cls,
         *,
         by_alias: bool = True,
-        ref_template: str = default_ref_template,
+        ref_template: str = pydantic_schema.default_ref_template,
         **dumps_kwargs: Any,
     ) -> str:
         # batch_slice is only a property/pydantic setter, so we need to add a field
@@ -230,7 +233,7 @@ def schema_json(
         )
         result = cls.__config__.json_dumps(
             cls.schema(by_alias=by_alias, ref_template=ref_template),
-            default=pydantic_encoder,
+            default=pydantic_json.pydantic_encoder,
             **dumps_kwargs,
         )
         # revert model changes
diff --git a/great_expectations/datasource/fluent/batch_request.pyi b/great_expectations/datasource/fluent/batch_request.pyi
index ff0081ad90ef..2d7fdd4eba0e 100644
--- a/great_expectations/datasource/fluent/batch_request.pyi
+++ b/great_expectations/datasource/fluent/batch_request.pyi
@@ -1,9 +1,9 @@
 from typing import Any, Dict, Optional
 
-import pydantic
-from pydantic import StrictStr
 from typing_extensions import TypeAlias
 
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import StrictStr
 from great_expectations.datasource.data_connector.batch_filter import BatchSlice
 
 BatchRequestOptions: TypeAlias = Dict[StrictStr, Any]
diff --git a/great_expectations/datasource/fluent/config.py b/great_expectations/datasource/fluent/config.py
index 607e84c3a601..6473d063ca48 100644
--- a/great_expectations/datasource/fluent/config.py
+++ b/great_expectations/datasource/fluent/config.py
@@ -21,9 +21,9 @@
     overload,
 )
 
-from pydantic import Extra, Field, validator
 from ruamel.yaml import YAML
 
+from great_expectations.compatibility.pydantic import Extra, Field, validator
 from great_expectations.compatibility.sqlalchemy import TextClause
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.datasource.fluent.constants import (
@@ -40,8 +40,9 @@
 )
 
 if TYPE_CHECKING:
-    from pydantic.error_wrappers import ErrorDict as PydanticErrorDict
-
+    from great_expectations.compatibility.pydantic.error_wrappers import (
+        ErrorDict as PydanticErrorDict,
+    )
     from great_expectations.datasource.fluent.fluent_base_model import (
         AbstractSetIntStr,
         MappingIntStrAny,
diff --git a/great_expectations/datasource/fluent/config_str.py b/great_expectations/datasource/fluent/config_str.py
index 7a22d4891efc..b3db7e20e0f1 100644
--- a/great_expectations/datasource/fluent/config_str.py
+++ b/great_expectations/datasource/fluent/config_str.py
@@ -4,8 +4,7 @@
 import warnings
 from typing import TYPE_CHECKING, Mapping
 
-from pydantic import SecretStr
-
+from great_expectations.compatibility.pydantic import SecretStr
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.config_substitutor import TEMPLATE_STR_REGEX
 
diff --git a/great_expectations/datasource/fluent/data_asset/data_connector/azure_blob_storage_data_connector.py b/great_expectations/datasource/fluent/data_asset/data_connector/azure_blob_storage_data_connector.py
index d7e8c39de2fc..5ba496df2ce8 100644
--- a/great_expectations/datasource/fluent/data_asset/data_connector/azure_blob_storage_data_connector.py
+++ b/great_expectations/datasource/fluent/data_asset/data_connector/azure_blob_storage_data_connector.py
@@ -4,8 +4,7 @@
 import re
 from typing import TYPE_CHECKING, Callable, ClassVar, List, Optional, Type
 
-import pydantic
-
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.batch_spec import AzureBatchSpec, PathBatchSpec
 from great_expectations.datasource.data_connector.util import (
diff --git a/great_expectations/datasource/fluent/data_asset/data_connector/filesystem_data_connector.py b/great_expectations/datasource/fluent/data_asset/data_connector/filesystem_data_connector.py
index 695ba81bfa18..82d8b395f346 100644
--- a/great_expectations/datasource/fluent/data_asset/data_connector/filesystem_data_connector.py
+++ b/great_expectations/datasource/fluent/data_asset/data_connector/filesystem_data_connector.py
@@ -5,8 +5,7 @@
 import re
 from typing import TYPE_CHECKING, Callable, ClassVar, List, Optional, Type
 
-import pydantic
-
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.datasource.data_connector.util import (
     get_filesystem_one_level_directory_glob_path_list,
diff --git a/great_expectations/datasource/fluent/data_asset/data_connector/google_cloud_storage_data_connector.py b/great_expectations/datasource/fluent/data_asset/data_connector/google_cloud_storage_data_connector.py
index cbc2ef8490ee..3f34695f4fc4 100644
--- a/great_expectations/datasource/fluent/data_asset/data_connector/google_cloud_storage_data_connector.py
+++ b/great_expectations/datasource/fluent/data_asset/data_connector/google_cloud_storage_data_connector.py
@@ -4,8 +4,7 @@
 import re
 from typing import TYPE_CHECKING, Callable, ClassVar, List, Optional, Type
 
-import pydantic
-
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.batch_spec import GCSBatchSpec, PathBatchSpec
 from great_expectations.datasource.data_connector.util import (
diff --git a/great_expectations/datasource/fluent/data_asset/data_connector/s3_data_connector.py b/great_expectations/datasource/fluent/data_asset/data_connector/s3_data_connector.py
index 995b8289933c..feae2e93f896 100644
--- a/great_expectations/datasource/fluent/data_asset/data_connector/s3_data_connector.py
+++ b/great_expectations/datasource/fluent/data_asset/data_connector/s3_data_connector.py
@@ -4,8 +4,7 @@
 import re
 from typing import TYPE_CHECKING, Callable, ClassVar, List, Optional, Type
 
-import pydantic
-
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.batch_spec import PathBatchSpec, S3BatchSpec
 from great_expectations.datasource.data_connector.util import (
diff --git a/great_expectations/datasource/fluent/databricks_sql_datasource.py b/great_expectations/datasource/fluent/databricks_sql_datasource.py
index fad1d307f7d0..b340e787bc13 100644
--- a/great_expectations/datasource/fluent/databricks_sql_datasource.py
+++ b/great_expectations/datasource/fluent/databricks_sql_datasource.py
@@ -3,9 +3,8 @@
 from typing import TYPE_CHECKING, ClassVar, List, Literal, Type, Union, overload
 from urllib import parse
 
-import pydantic
-from pydantic import AnyUrl
-
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import AnyUrl
 from great_expectations.compatibility.sqlalchemy import (
     sqlalchemy as sa,
 )
@@ -27,10 +26,10 @@
 )
 
 if TYPE_CHECKING:
-    from pydantic.networks import Parts
     from sqlalchemy.sql import quoted_name  # noqa: TID251 # type-checking only
 
     from great_expectations.compatibility import sqlalchemy
+    from great_expectations.compatibility.pydantic.networks import Parts
    from great_expectations.core.config_provider import _ConfigurationProvider
 
diff --git a/great_expectations/datasource/fluent/dynamic_pandas.py b/great_expectations/datasource/fluent/dynamic_pandas.py
index db69ff006707..722ddf8e0743 100644
--- a/great_expectations/datasource/fluent/dynamic_pandas.py
+++ b/great_expectations/datasource/fluent/dynamic_pandas.py
@@ -30,11 +30,12 @@
 )
 
 import pandas as pd
-import pydantic
 from packaging.version import Version
-from pydantic import AnyUrl, Field, FilePath
 
-# from pydantic.typing import resolve_annotations
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import AnyUrl, Field, FilePath
+
+# from great_expectations.compatibility.pydantic.typing import resolve_annotations
 from great_expectations.datasource.fluent.config_str import ConfigStr
 from great_expectations.datasource.fluent.interfaces import (
     DataAsset,
diff --git a/great_expectations/datasource/fluent/file_path_data_asset.py b/great_expectations/datasource/fluent/file_path_data_asset.py
index 4264a72eecdf..c33c23bfc77a 100644
--- a/great_expectations/datasource/fluent/file_path_data_asset.py
+++ b/great_expectations/datasource/fluent/file_path_data_asset.py
@@ -18,9 +18,8 @@
     Set,
 )
 
-import pydantic
-
 import great_expectations.exceptions as gx_exceptions
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.datasource.fluent.batch_request import (
diff --git a/great_expectations/datasource/fluent/fluent_base_model.py b/great_expectations/datasource/fluent/fluent_base_model.py
index 5904868dc948..aa7b4bc69841 100644
--- a/great_expectations/datasource/fluent/fluent_base_model.py
+++ b/great_expectations/datasource/fluent/fluent_base_model.py
@@ -19,9 +19,9 @@
     overload,
 )
 
-import pydantic
 from ruamel.yaml import YAML
 
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.datasource.fluent.config_str import ConfigStr
 from great_expectations.datasource.fluent.constants import (
diff --git a/great_expectations/datasource/fluent/interfaces.py b/great_expectations/datasource/fluent/interfaces.py
index 8bb225ff2c81..db5bce8499ec 100644
--- a/great_expectations/datasource/fluent/interfaces.py
+++ b/great_expectations/datasource/fluent/interfaces.py
@@ -28,16 +28,15 @@
     Union,
 )
 
-import pydantic
-from pydantic import (
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import (
     Field,
     StrictBool,
     StrictInt,
     root_validator,
     validate_arguments,
 )
-from pydantic import dataclasses as pydantic_dc
-
+from great_expectations.compatibility.pydantic import dataclasses as pydantic_dc
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.core.config_substitutor import _ConfigurationSubstitutor
diff --git a/great_expectations/datasource/fluent/metadatasource.py b/great_expectations/datasource/fluent/metadatasource.py
index 0d4289305877..b8e0c04d9b43 100644
--- a/great_expectations/datasource/fluent/metadatasource.py
+++ b/great_expectations/datasource/fluent/metadatasource.py
@@ -7,15 +7,14 @@
 from pprint import pformat as pf
 from typing import Set, Type
 
-import pydantic
-
+from great_expectations.compatibility.pydantic import ModelMetaclass
 from great_expectations.datasource.fluent.sources import _SourceFactories
 from great_expectations.datasource.fluent.type_lookup import TypeLookup
 
 logger = logging.getLogger(__name__)
 
 
-class MetaDatasource(pydantic.main.ModelMetaclass):
+class MetaDatasource(ModelMetaclass):
     __cls_set: Set[Type] = set()
 
     def __new__(  # noqa: PYI034 # Self cannot be used with Metaclass
diff --git a/great_expectations/datasource/fluent/pandas_azure_blob_storage_datasource.py b/great_expectations/datasource/fluent/pandas_azure_blob_storage_datasource.py
index 06e76e767f10..b7951b89dea5 100644
--- a/great_expectations/datasource/fluent/pandas_azure_blob_storage_datasource.py
+++ b/great_expectations/datasource/fluent/pandas_azure_blob_storage_datasource.py
@@ -4,9 +4,7 @@
 import re
 from typing import TYPE_CHECKING, Any, ClassVar, Dict, Final, Literal, Type, Union
 
-import pydantic
-
-from great_expectations.compatibility import azure
+from great_expectations.compatibility import azure, pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.core.util import AzureUrl
diff --git a/great_expectations/datasource/fluent/pandas_datasource.py b/great_expectations/datasource/fluent/pandas_datasource.py
index c9b1d83e52e4..8c337a391196 100644
--- a/great_expectations/datasource/fluent/pandas_datasource.py
+++ b/great_expectations/datasource/fluent/pandas_datasource.py
@@ -24,10 +24,9 @@
 )
 
 import pandas as pd
-import pydantic
 
 import great_expectations.exceptions as gx_exceptions
-from great_expectations.compatibility import sqlalchemy
+from great_expectations.compatibility import pydantic, sqlalchemy
 from great_expectations.compatibility.sqlalchemy import sqlalchemy as sa
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import (
@@ -769,7 +768,7 @@ def add_csv_asset(
         """
         asset = CSVAsset(
             name=name,
-            filepath_or_buffer=filepath_or_buffer,
+            filepath_or_buffer=filepath_or_buffer,  # type: ignore[call-arg]
             **kwargs,
         )
         return self._add_asset(asset=asset)
@@ -818,7 +817,7 @@ def add_excel_asset(
         Returns:
             The ExcelAsset that has been added to this datasource.
         """
-        asset = ExcelAsset(
+        asset = ExcelAsset(  # type: ignore[call-arg]
             name=name,
             io=io,
             **kwargs,
@@ -869,7 +868,7 @@ def add_feather_asset(
         Returns:
             The FeatherAsset that has been added to this datasource.
         """
-        asset = FeatherAsset(
+        asset = FeatherAsset(  # type: ignore[call-arg]
             name=name,
             path=path,
             **kwargs,
@@ -920,7 +919,7 @@ def add_fwf_asset(
         Returns:
             The FWFAsset that has been added to this datasource.
         """
-        asset = FWFAsset(
+        asset = FWFAsset(  # type: ignore[call-arg]
             name=name,
             filepath_or_buffer=filepath_or_buffer,
             **kwargs,
@@ -971,7 +970,7 @@ def add_gbq_asset(
         Returns:
             The GBQAsset that has been added to this datasource.
         """
-        asset = GBQAsset(
+        asset = GBQAsset(  # type: ignore[call-arg]
             name=name,
             query=query,
             **kwargs,
@@ -1022,7 +1021,7 @@ def add_hdf_asset(
         Returns:
             The HDFAsset that has been added to this datasource.
         """
-        asset = HDFAsset(
+        asset = HDFAsset(  # type: ignore[call-arg]
             name=name,
             path_or_buf=path_or_buf,
             **kwargs,
@@ -1073,7 +1072,7 @@ def add_html_asset(
         Returns:
             The HTMLAsset that has been added to this datasource.
         """
-        asset = HTMLAsset(
+        asset = HTMLAsset(  # type: ignore[call-arg]
             name=name,
             io=io,
             **kwargs,
@@ -1124,7 +1123,7 @@ def add_json_asset(
         Returns:
             The JSONAsset that has been added to this datasource.
         """
-        asset = JSONAsset(
+        asset = JSONAsset(  # type: ignore[call-arg]
             name=name,
             path_or_buf=path_or_buf,
             **kwargs,
@@ -1175,7 +1174,7 @@ def add_orc_asset(
         Returns:
             The ORCAsset that has been added to this datasource.
         """
-        asset = ORCAsset(
+        asset = ORCAsset(  # type: ignore[call-arg]
             name=name,
             path=path,
             **kwargs,
@@ -1226,7 +1225,7 @@ def add_parquet_asset(
         Returns:
             The ParquetAsset that has been added to this datasource.
         """
-        asset = ParquetAsset(
+        asset = ParquetAsset(  # type: ignore[call-arg]
             name=name,
             path=path,
             **kwargs,
@@ -1277,7 +1276,7 @@ def add_pickle_asset(
         Returns:
             The PickleAsset that has been added to this datasource.
         """
-        asset = PickleAsset(
+        asset = PickleAsset(  # type: ignore[call-arg]
             name=name,
             filepath_or_buffer=filepath_or_buffer,
             **kwargs,
@@ -1328,7 +1327,7 @@ def add_sas_asset(
         Returns:
             The SASAsset that has been added to this datasource.
         """
-        asset = SASAsset(
+        asset = SASAsset(  # type: ignore[call-arg]
             name=name,
             filepath_or_buffer=filepath_or_buffer,
             **kwargs,
@@ -1379,7 +1378,7 @@ def add_spss_asset(
         Returns:
             The SPSSAsset that has been added to this datasource.
         """
-        asset = SPSSAsset(
+        asset = SPSSAsset(  # type: ignore[call-arg]
             name=name,
             path=path,
             **kwargs,
@@ -1432,7 +1431,7 @@ def add_sql_asset(
         Returns:
             The SQLAsset that has been added to this datasource.
""" - asset = SQLAsset( + asset = SQLAsset( # type: ignore[call-arg] name=name, sql=sql, con=con, @@ -1489,7 +1488,7 @@ def add_sql_query_asset( Returns: The SQLQueryAsset that has been added to this datasource. """ - asset = SQLQueryAsset( + asset = SQLQueryAsset( # type: ignore[call-arg] name=name, sql=sql, con=con, @@ -1546,7 +1545,7 @@ def add_sql_table_asset( Returns: The SQLTableAsset that has been added to this datasource. """ - asset = SQLTableAsset( + asset = SQLTableAsset( # type: ignore[call-arg] name=name, table_name=table_name, con=con, @@ -1601,7 +1600,7 @@ def add_stata_asset( Returns: The StataAsset that has been added to this datasource. """ - asset = StataAsset( + asset = StataAsset( # type: ignore[call-arg] name=name, filepath_or_buffer=filepath_or_buffer, **kwargs, @@ -1652,7 +1651,7 @@ def add_table_asset( Returns: The TableAsset that has been added to this datasource. """ - asset = TableAsset( + asset = TableAsset( # type: ignore[call-arg] name=name, filepath_or_buffer=filepath_or_buffer, **kwargs, @@ -1703,7 +1702,7 @@ def add_xml_asset( Returns: The XMLAsset that has been added to this datasource. """ - asset = XMLAsset( + asset = XMLAsset( # type: ignore[call-arg] name=name, path_or_buffer=path_or_buffer, **kwargs, diff --git a/great_expectations/datasource/fluent/pandas_datasource.pyi b/great_expectations/datasource/fluent/pandas_datasource.pyi index ed02771a6495..aeccfda11d06 100644 --- a/great_expectations/datasource/fluent/pandas_datasource.pyi +++ b/great_expectations/datasource/fluent/pandas_datasource.pyi @@ -23,10 +23,9 @@ from typing import ( ) import pandas as pd -import pydantic from typing_extensions import TypeAlias -from great_expectations.compatibility import sqlalchemy +from great_expectations.compatibility import pydantic, sqlalchemy from great_expectations.compatibility.sqlalchemy import sqlalchemy as sa from great_expectations.compatibility.typing_extensions import override from great_expectations.core._docs_decorators import ( diff --git a/great_expectations/datasource/fluent/pandas_google_cloud_storage_datasource.py b/great_expectations/datasource/fluent/pandas_google_cloud_storage_datasource.py index 2804015a7fdc..e08bde5fffe0 100644 --- a/great_expectations/datasource/fluent/pandas_google_cloud_storage_datasource.py +++ b/great_expectations/datasource/fluent/pandas_google_cloud_storage_datasource.py @@ -3,9 +3,7 @@ import logging from typing import TYPE_CHECKING, Any, ClassVar, Dict, Literal, Type, Union -import pydantic - -from great_expectations.compatibility import google +from great_expectations.compatibility import google, pydantic from great_expectations.compatibility.typing_extensions import override from great_expectations.core._docs_decorators import public_api from great_expectations.core.util import GCSUrl diff --git a/great_expectations/datasource/fluent/pandas_s3_datasource.py b/great_expectations/datasource/fluent/pandas_s3_datasource.py index aeabe02fe869..ad33e66962c7 100644 --- a/great_expectations/datasource/fluent/pandas_s3_datasource.py +++ b/great_expectations/datasource/fluent/pandas_s3_datasource.py @@ -3,9 +3,7 @@ import logging from typing import TYPE_CHECKING, Any, ClassVar, Dict, Literal, Type, Union -import pydantic - -from great_expectations.compatibility import aws +from great_expectations.compatibility import aws, pydantic from great_expectations.compatibility.typing_extensions import override from great_expectations.core._docs_decorators import public_api from great_expectations.core.util import S3Url diff 
diff --git a/great_expectations/datasource/fluent/postgres_datasource.py b/great_expectations/datasource/fluent/postgres_datasource.py
index 70f92c860e5e..76ef92dd9736 100644
--- a/great_expectations/datasource/fluent/postgres_datasource.py
+++ b/great_expectations/datasource/fluent/postgres_datasource.py
@@ -1,7 +1,6 @@
 from typing import Literal, Union
 
-from pydantic import PostgresDsn
-
+from great_expectations.compatibility.pydantic import PostgresDsn
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.datasource.fluent.config_str import ConfigStr
 from great_expectations.datasource.fluent.sql_datasource import SQLDatasource
diff --git a/great_expectations/datasource/fluent/snowflake_datasource.py b/great_expectations/datasource/fluent/snowflake_datasource.py
index b6a2d656ae37..f727233b0b04 100644
--- a/great_expectations/datasource/fluent/snowflake_datasource.py
+++ b/great_expectations/datasource/fluent/snowflake_datasource.py
@@ -2,9 +2,8 @@
 
 from typing import TYPE_CHECKING, ClassVar, Literal, Optional, Union
 
-import pydantic
-from pydantic import AnyUrl, errors
-
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import AnyUrl, errors
 from great_expectations.compatibility.snowflake import URL
 from great_expectations.compatibility.sqlalchemy import sqlalchemy as sa
 from great_expectations.compatibility.typing_extensions import override
@@ -16,9 +15,8 @@
 )
 
 if TYPE_CHECKING:
-    from pydantic.networks import Parts
-
     from great_expectations.compatibility import sqlalchemy
+    from great_expectations.compatibility.pydantic.networks import Parts
 
 
 class _UrlPasswordError(pydantic.UrlError):
diff --git a/great_expectations/datasource/fluent/sources.py b/great_expectations/datasource/fluent/sources.py
index 51ef055478cc..40812d5f301f 100644
--- a/great_expectations/datasource/fluent/sources.py
+++ b/great_expectations/datasource/fluent/sources.py
@@ -31,9 +31,9 @@
 from great_expectations.datasource.fluent.type_lookup import TypeLookup
 
 if TYPE_CHECKING:
-    import pydantic
     from typing_extensions import TypeAlias
 
+    from great_expectations.compatibility import pydantic
     from great_expectations.data_context import AbstractDataContext as GXDataContext
     from great_expectations.datasource.fluent import PandasDatasource
     from great_expectations.datasource.fluent.interfaces import DataAsset, Datasource
@@ -463,7 +463,7 @@ def add_datasource(
                 datasource_type, name_or_datasource, **kwargs
             )
         ) or (
-            datasource_type(name=name_or_datasource, **kwargs)  # type: ignore[arg-type] # could be Datasource - expect str
+            datasource_type(name=name_or_datasource, **kwargs)
             if name_or_datasource
             else datasource_type(**kwargs)
         )
diff --git a/great_expectations/datasource/fluent/sources.pyi b/great_expectations/datasource/fluent/sources.pyi
index b46a452d0800..75a58163b363 100644
--- a/great_expectations/datasource/fluent/sources.pyi
+++ b/great_expectations/datasource/fluent/sources.pyi
@@ -14,9 +14,9 @@ from typing import (
     overload,
 )
 
-import pydantic
 from typing_extensions import TypeAlias, override
 
+from great_expectations.compatibility import pydantic
 from great_expectations.data_context import (
     AbstractDataContext as GXDataContext,
 )
diff --git a/great_expectations/datasource/fluent/spark_azure_blob_storage_datasource.py b/great_expectations/datasource/fluent/spark_azure_blob_storage_datasource.py
index 9d729e123101..f771e45d954e 100644
--- a/great_expectations/datasource/fluent/spark_azure_blob_storage_datasource.py
+++ b/great_expectations/datasource/fluent/spark_azure_blob_storage_datasource.py
@@ -4,9 +4,7 @@
 import re
 from typing import TYPE_CHECKING, Any, ClassVar, Dict, Final, Literal, Type, Union
 
-import pydantic
-
-from great_expectations.compatibility import azure
+from great_expectations.compatibility import azure, pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.core.util import AzureUrl
diff --git a/great_expectations/datasource/fluent/spark_datasource.py b/great_expectations/datasource/fluent/spark_datasource.py
index 4717c2c6717e..cd6daa2aab79 100644
--- a/great_expectations/datasource/fluent/spark_datasource.py
+++ b/great_expectations/datasource/fluent/spark_datasource.py
@@ -15,10 +15,14 @@
     Union,
 )
 
-import pydantic
-from pydantic import StrictBool, StrictFloat, StrictInt, StrictStr
-
 import great_expectations.exceptions as gx_exceptions
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import (
+    StrictBool,
+    StrictFloat,
+    StrictInt,
+    StrictStr,
+)
 from great_expectations.compatibility.pyspark import DataFrame, pyspark
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import (
@@ -261,7 +265,7 @@ class SparkDatasource(_SparkDatasource):
 
     # instance attributes
     type: Literal["spark"] = "spark"
-    assets: List[DataFrameAsset] = []  # type: ignore[assignment]
+    assets: List[DataFrameAsset] = []
 
     @override
     def test_connection(self, test_assets: bool = True) -> None:
diff --git a/great_expectations/datasource/fluent/spark_file_path_datasource.py b/great_expectations/datasource/fluent/spark_file_path_datasource.py
index 3af65cc9b589..a07862a3d1db 100644
--- a/great_expectations/datasource/fluent/spark_file_path_datasource.py
+++ b/great_expectations/datasource/fluent/spark_file_path_datasource.py
@@ -12,9 +12,8 @@
     Union,
 )
 
-import pydantic
-from pydantic import Field
-
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import Field
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.datasource.fluent import _SparkDatasource
 from great_expectations.datasource.fluent.directory_data_asset import (
@@ -651,4 +650,4 @@ class _SparkFilePathDatasource(_SparkDatasource):
     asset_types: ClassVar[Sequence[Type[DataAsset]]] = _SPARK_FILE_PATH_ASSET_TYPES
 
     # instance attributes
-    assets: List[_SPARK_FILE_PATH_ASSET_TYPES_UNION] = []  # type: ignore[assignment]
+    assets: List[_SPARK_FILE_PATH_ASSET_TYPES_UNION] = []
diff --git a/great_expectations/datasource/fluent/spark_generic_splitters.py b/great_expectations/datasource/fluent/spark_generic_splitters.py
index c0a4723a3be3..44b2b922437a 100644
--- a/great_expectations/datasource/fluent/spark_generic_splitters.py
+++ b/great_expectations/datasource/fluent/spark_generic_splitters.py
@@ -10,8 +10,7 @@
     Union,
 )
 
-import pydantic
-
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.datasource.fluent.fluent_base_model import (
     FluentBaseModel,
diff --git a/great_expectations/datasource/fluent/spark_google_cloud_storage_datasource.py b/great_expectations/datasource/fluent/spark_google_cloud_storage_datasource.py
index 4a4533661b40..3c53a1f3116a 100644
--- a/great_expectations/datasource/fluent/spark_google_cloud_storage_datasource.py
+++ b/great_expectations/datasource/fluent/spark_google_cloud_storage_datasource.py
@@ -3,9 +3,7 @@
 import logging
 from typing import TYPE_CHECKING, Any, ClassVar, Dict, Literal, Type, Union
 
-import pydantic
-
-from great_expectations.compatibility import google
+from great_expectations.compatibility import google, pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.core.util import GCSUrl
diff --git a/great_expectations/datasource/fluent/spark_s3_datasource.py b/great_expectations/datasource/fluent/spark_s3_datasource.py
index bdc48c818f7e..8dd81885113b 100644
--- a/great_expectations/datasource/fluent/spark_s3_datasource.py
+++ b/great_expectations/datasource/fluent/spark_s3_datasource.py
@@ -3,9 +3,7 @@
 import logging
 from typing import TYPE_CHECKING, Any, ClassVar, Dict, Literal, Type, Union
 
-import pydantic
-
-from great_expectations.compatibility import aws
+from great_expectations.compatibility import aws, pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.core.util import S3Url
diff --git a/great_expectations/datasource/fluent/sql_datasource.py b/great_expectations/datasource/fluent/sql_datasource.py
index a4497812f424..59a6b48078a6 100644
--- a/great_expectations/datasource/fluent/sql_datasource.py
+++ b/great_expectations/datasource/fluent/sql_datasource.py
@@ -17,11 +17,11 @@
     cast,
 )
 
-import pydantic
-from pydantic import Field
 from typing_extensions import Annotated
 
 import great_expectations.exceptions as gx_exceptions
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import Field
 from great_expectations.compatibility.sqlalchemy import sqlalchemy as sa
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
diff --git a/great_expectations/datasource/fluent/sqlite_datasource.py b/great_expectations/datasource/fluent/sqlite_datasource.py
index 15be68d25476..274825921654 100644
--- a/great_expectations/datasource/fluent/sqlite_datasource.py
+++ b/great_expectations/datasource/fluent/sqlite_datasource.py
@@ -13,8 +13,7 @@
     cast,
 )
 
-import pydantic
-
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core._docs_decorators import public_api
 from great_expectations.datasource.fluent.config_str import ConfigStr
diff --git a/great_expectations/experimental/metric_repository/cloud_data_store.py b/great_expectations/experimental/metric_repository/cloud_data_store.py
index bf9fb160df45..13a98f97eeb9 100644
--- a/great_expectations/experimental/metric_repository/cloud_data_store.py
+++ b/great_expectations/experimental/metric_repository/cloud_data_store.py
@@ -3,9 +3,7 @@
 import uuid
 from typing import TYPE_CHECKING, Any, Dict, TypeVar
 
-import pydantic
-from pydantic import BaseModel
-
+from great_expectations.compatibility.pydantic import BaseModel, Extra
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core.http import create_session
 from great_expectations.experimental.metric_repository.data_store import DataStore
@@ -27,14 +25,14 @@ class PayloadData(BaseModel):
     attributes: Dict[str, Any]
 
     class Config:
-        extra = pydantic.Extra.forbid
+        extra = Extra.forbid
 
 
 class Payload(BaseModel):
     data: PayloadData
 
     class Config:
-        extra = pydantic.Extra.forbid
+        extra = Extra.forbid
 
 
 class CloudDataStore(DataStore[StorableTypes]):
diff --git a/great_expectations/experimental/metric_repository/column_descriptive_metrics_metric_retriever.py b/great_expectations/experimental/metric_repository/column_descriptive_metrics_metric_retriever.py
index 8b58032139a5..1cdd54988b3f 100644
--- a/great_expectations/experimental/metric_repository/column_descriptive_metrics_metric_retriever.py
+++ b/great_expectations/experimental/metric_repository/column_descriptive_metrics_metric_retriever.py
@@ -78,7 +78,7 @@ def _get_table_metrics(self, batch_request: BatchRequest) -> Sequence[Metric]:
             TableMetric[int](
                 batch_id=validator.active_batch.id,
                 metric_name=metric_name,
-                value=computed_metrics[metric_lookup_key],  # type: ignore[arg-type] # Pydantic verifies the value type
+                value=computed_metrics[metric_lookup_key],
                 exception=None,  # TODO: Pass through a MetricException() if an exception is thrown
             )
         )
@@ -89,7 +89,7 @@ def _get_table_metrics(self, batch_request: BatchRequest) -> Sequence[Metric]:
             TableMetric[List[str]](
                 batch_id=validator.active_batch.id,
                 metric_name=metric_name,
-                value=computed_metrics[metric_lookup_key],  # type: ignore[arg-type] # Pydantic verifies the value type
+                value=computed_metrics[metric_lookup_key],
                 exception=None,  # TODO: Pass through a MetricException() if an exception is thrown
             )
         )
@@ -142,7 +142,7 @@ def _get_column_metrics(
                     batch_id=validator.active_batch.id,
                     metric_name=metric_name,
                     column=column,
-                    value=computed_metrics[metric_lookup_key],  # type: ignore[arg-type] # Pydantic verifies the value type
+                    value=computed_metrics[metric_lookup_key],
                     exception=None,  # TODO: Pass through a MetricException() if an exception is thrown
                 )
             )
diff --git a/great_expectations/experimental/metric_repository/metrics.py b/great_expectations/experimental/metric_repository/metrics.py
index 794e5e751672..f039495dc399 100644
--- a/great_expectations/experimental/metric_repository/metrics.py
+++ b/great_expectations/experimental/metric_repository/metrics.py
@@ -15,9 +15,8 @@
     Union,
 )
 
-import pydantic
-from pydantic import BaseModel, Field
-
+from great_expectations.compatibility import pydantic
+from great_expectations.compatibility.pydantic import BaseModel, Field
 from great_expectations.compatibility.typing_extensions import override
 
 if TYPE_CHECKING:
diff --git a/great_expectations/render/renderer_configuration.py b/great_expectations/render/renderer_configuration.py
index c5f3fbd37428..ca2380d45183 100644
--- a/great_expectations/render/renderer_configuration.py
+++ b/great_expectations/render/renderer_configuration.py
@@ -21,7 +21,9 @@
 import dateutil
 from dateutil.parser import ParserError
-from pydantic import (
+from typing_extensions import TypeAlias, TypedDict
+
+from great_expectations.compatibility.pydantic import (
     BaseModel,
     Field,
     ValidationError,
@@ -29,9 +31,7 @@
     root_validator,
     validator,
 )
-from pydantic.generics import GenericModel
-from typing_extensions import TypeAlias, TypedDict
-
+from great_expectations.compatibility.pydantic import generics as pydantic_generics
 from great_expectations.compatibility.typing_extensions import override
 from great_expectations.core import (
     ExpectationConfiguration,  # noqa: TCH001
@@ -40,7 +40,11 @@
 from great_expectations.render.exceptions import RendererConfigurationError
 
 if TYPE_CHECKING:
-    from pydantic.typing import AbstractSetIntStr, DictStrAny, MappingIntStrAny
+    from great_expectations.compatibility.pydantic.typing import (
+        AbstractSetIntStr,
+        DictStrAny,
+        MappingIntStrAny,
+    )
 
 
 class RendererValueType(str, Enum):
@@ -131,7 +135,7 @@ class MetaNotes(TypedDict):
     content: List[str]
 
 
-class RendererConfiguration(GenericModel, Generic[RendererParams]):
+class RendererConfiguration(pydantic_generics.GenericModel, Generic[RendererParams]):
     """
     Configuration object built for each renderer. Operations to be performed strictly on this object at the renderer
     implementation-level.
@@ -576,13 +580,13 @@ def add_param(
         # we need to combine the param passed to add_param() with those existing raw_kwargs
         if (
             name in renderer_params_args
-            and renderer_params_args[name]["evaluation_parameter"]
+            and renderer_params_args[name]["evaluation_parameter"]  # type: ignore[index]
         ):
             new_args = {
                 name: renderer_param(
                     schema=RendererSchema(type=param_type),
                     value=value,
-                    evaluation_parameter=renderer_params_args[name][
+                    evaluation_parameter=renderer_params_args[name][  # type: ignore[index]
                         "evaluation_parameter"
                     ],
                 )
diff --git a/great_expectations/types/__init__.py b/great_expectations/types/__init__.py
index 527477ce3e26..08efcbf7d607 100644
--- a/great_expectations/types/__init__.py
+++ b/great_expectations/types/__init__.py
@@ -4,9 +4,8 @@
 from typing import ClassVar, Dict, Optional, Set
 
 import pandas as pd
-import pydantic
 
-from great_expectations.compatibility import pyspark
+from great_expectations.compatibility import pydantic, pyspark
 from ..alias_types import JSONValues
 from ..core._docs_decorators import public_api
 
diff --git a/pyproject.toml b/pyproject.toml
index 23f71503b8b2..b926d56d9ef8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -322,6 +322,8 @@ If you are working in a configuration file you may use the inline comment \
 "trino".msg = "Please do not import trino directly, import from great_expectations.compatibility.trino instead."
 "pyarrow".msg = "Please do not import pyarrow directly, import from great_expectations.compatibility.pyarrow instead."
 "typing_extensions.override".msg = "Do not import typing_extensions.override directly, import `override` from great_expectations.compatibility.typing_extensions instead."
+# TODO: remove pydantic once our min version is pydantic v2
+"pydantic".msg = "Please do not import pydantic directly, import from great_expectations.compatibility.pydantic instead."
 
 # -----------------------------------------------------------------
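The `tool.ruff` banned-API entry above makes the shim enforceable: once it lands, ruff flags any direct `pydantic` import outside the compatibility layer. A hypothetical module illustrating what the linter would now accept and reject — the `TID251` code comes from ruff's flake8-tidy-imports banned-api rule:

```python
# example_module.py -- illustrates the import pattern the new lint rule enforces.
# Direct imports are flagged by ruff as TID251 ("banned API"):
#   import pydantic                    # error: TID251
#   from pydantic import BaseModel     # error: TID251

# The sanctioned route goes through the compatibility layer, which picks
# the right backing implementation at import time:
from great_expectations.compatibility import pydantic
from great_expectations.compatibility.pydantic import BaseModel


class DatasourceStub(BaseModel):
    name: str


def parse(raw: dict) -> DatasourceStub:
    try:
        return DatasourceStub(**raw)
    except pydantic.ValidationError as err:
        raise ValueError(f"bad datasource config: {err.errors()}") from err
```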
diff --git a/requirements.txt b/requirements.txt
index b2d3e305ee93..3b0f9c9bb7e9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -20,7 +20,7 @@ pandas>=1.1.0; python_version <= "3.8"
 pandas>=1.1.3; python_version == "3.9"
 pandas>=1.3.0; python_version >= "3.10"
 # patch version updates `typing_extensions` to the needed version
-pydantic>=1.9.2,<2.0
+pydantic>=1.9.2
 pyparsing>=2.4
 python-dateutil>=2.8.1
 pytz>=2021.3
diff --git a/tests/datasource/fluent/_fake_cloud_api.py b/tests/datasource/fluent/_fake_cloud_api.py
index c3a64c14c310..a38b8075b6af 100644
--- a/tests/datasource/fluent/_fake_cloud_api.py
+++ b/tests/datasource/fluent/_fake_cloud_api.py
@@ -21,9 +21,9 @@
     Union,
 )
 
-import pydantic
 import responses
 
+from great_expectations.compatibility import pydantic
 from great_expectations.data_context.store.gx_cloud_store_backend import (
     ErrorDetail,
     ErrorPayload,
diff --git a/tests/datasource/fluent/data_asset/data_connector/test_filesystem_data_connector.py b/tests/datasource/fluent/data_asset/data_connector/test_filesystem_data_connector.py
index 9136a834eb99..766dcc218ec3 100644
--- a/tests/datasource/fluent/data_asset/data_connector/test_filesystem_data_connector.py
+++ b/tests/datasource/fluent/data_asset/data_connector/test_filesystem_data_connector.py
@@ -2,9 +2,9 @@
 import re
 from typing import TYPE_CHECKING, List
 
-import pydantic
 import pytest
 
+from great_expectations.compatibility import pydantic
 from great_expectations.core import IDDict
 from great_expectations.core.batch import BatchDefinition
 from great_expectations.datasource.fluent import BatchRequest
diff --git a/tests/datasource/fluent/integration/integration_test_utils.py b/tests/datasource/fluent/integration/integration_test_utils.py
index fa9853153345..787c36ca4362 100644
--- a/tests/datasource/fluent/integration/integration_test_utils.py
+++ b/tests/datasource/fluent/integration/integration_test_utils.py
@@ -4,10 +4,10 @@
 from typing import TYPE_CHECKING, Dict, Tuple
 
 import pytest
-from pydantic import ValidationError
 
 from great_expectations.checkpoint import SimpleCheckpoint
 from great_expectations.checkpoint.types.checkpoint_result import CheckpointResult
+from great_expectations.compatibility.pydantic import ValidationError
 from great_expectations.data_context import AbstractDataContext
 from great_expectations.datasource.fluent import BatchRequest, PandasDatasource
 from great_expectations.datasource.fluent.interfaces import (
diff --git a/tests/datasource/fluent/integration/test_integration_datasource.py b/tests/datasource/fluent/integration/test_integration_datasource.py
index 89e92c5a94db..a0bc8a6dc4d4 100644
--- a/tests/datasource/fluent/integration/test_integration_datasource.py
+++ b/tests/datasource/fluent/integration/test_integration_datasource.py
@@ -6,11 +6,11 @@
 from unittest import mock
 
 import pandas as pd
-import pydantic
 import pytest
 
 import great_expectations as gx
 from great_expectations.checkpoint import SimpleCheckpoint
+from great_expectations.compatibility import pydantic
 from great_expectations.data_context import (
     AbstractDataContext,
     CloudDataContext,
diff --git a/tests/datasource/fluent/test_config.py b/tests/datasource/fluent/test_config.py
index c1d3e36fcf94..aacff16a0fbb 100644
--- a/tests/datasource/fluent/test_config.py
+++ b/tests/datasource/fluent/test_config.py
@@ -18,9 +18,9 @@
     cast,
 )
 
-import pydantic
 import pytest
 
+from great_expectations.compatibility import pydantic
 from great_expectations.core.yaml_handler import YAMLHandler
 from great_expectations.data_context import FileDataContext
 from great_expectations.datasource.fluent.config import (
diff --git a/tests/datasource/fluent/test_config_str.py b/tests/datasource/fluent/test_config_str.py
index f2835f9ed1dc..50e9be7da0af 100644
--- a/tests/datasource/fluent/test_config_str.py
+++ b/tests/datasource/fluent/test_config_str.py
@@ -2,10 +2,10 @@
 
 from typing import Callable, List, Union
 
-import pydantic
 import pytest
 from pytest import MonkeyPatch
 
+from great_expectations.compatibility import pydantic
 from great_expectations.core.config_provider import (
     _ConfigurationProvider,
     _EnvironmentConfigurationProvider,
diff --git a/tests/datasource/fluent/test_databricks_sql_datasource.py b/tests/datasource/fluent/test_databricks_sql_datasource.py
index 6238861ef254..3715be3f2bdc 100644
--- a/tests/datasource/fluent/test_databricks_sql_datasource.py
+++ b/tests/datasource/fluent/test_databricks_sql_datasource.py
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
-import pydantic
 import pytest
 
+from great_expectations.compatibility import pydantic
 from great_expectations.datasource.fluent.databricks_sql_datasource import (
     DatabricksSQLDatasource,
 )
@@ -98,7 +98,9 @@ def test_invalid_connection_string_raises_dsn_error(
     connection_string: str, expected_errors: list[dict]
 ):
     with pytest.raises(pydantic.ValidationError) as exc_info:
-        _ = DatabricksSQLDatasource(name="my_databricks", connection_string=connection_string)  # type: ignore[arg-type] # Pydantic coerces connection_string to DatabricksDsn
+        _ = DatabricksSQLDatasource(
+            name="my_databricks", connection_string=connection_string
+        )
 
     assert expected_errors == exc_info.value.errors()
     assert "my_token" not in str(exc_info.value.errors())
diff --git a/tests/datasource/fluent/test_metadatasource.py b/tests/datasource/fluent/test_metadatasource.py
index 2705f50a2daa..24e28b2710a2 100644
--- a/tests/datasource/fluent/test_metadatasource.py
+++ b/tests/datasource/fluent/test_metadatasource.py
@@ -8,8 +8,8 @@
 from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Tuple, Type, Union
 
 import pytest
-from pydantic import DirectoryPath, validate_arguments
 
+from great_expectations.compatibility.pydantic import DirectoryPath, validate_arguments
 from great_expectations.core.yaml_handler import YAMLHandler
 from great_expectations.data_context import AbstractDataContext, FileDataContext
 from great_expectations.data_context import get_context as get_gx_context
diff --git a/tests/datasource/fluent/test_pandas_azure_blob_storage_datasource.py b/tests/datasource/fluent/test_pandas_azure_blob_storage_datasource.py
index 089af9f82f2d..92385220a882 100644
--- a/tests/datasource/fluent/test_pandas_azure_blob_storage_datasource.py
+++ b/tests/datasource/fluent/test_pandas_azure_blob_storage_datasource.py
@@ -75,7 +75,7 @@ def _build_pandas_abs_datasource(
     azure_client: azure.BlobServiceClient = cast(
         azure.BlobServiceClient, MockBlobServiceClient()
     )
-    pandas_abs_datasource = PandasAzureBlobStorageDatasource(  # type: ignore[call-arg]
+    pandas_abs_datasource = PandasAzureBlobStorageDatasource(
         name="pandas_abs_datasource",
         azure_options=azure_options or {},
     )
@@ -157,7 +157,7 @@ def test_construct_pandas_abs_datasource_with_account_url_and_config_credential(
 ):
     monkeypatch.setenv("MY_CRED", "my_secret_credential")
 
-    pandas_abs_datasource = PandasAzureBlobStorageDatasource(  # type: ignore[call-arg] # args are optional
+    pandas_abs_datasource = PandasAzureBlobStorageDatasource(
name="pandas_abs_datasource", azure_options={ "account_url": "my_account_url.blob.core.windows.net", diff --git a/tests/datasource/fluent/test_pandas_datasource.py b/tests/datasource/fluent/test_pandas_datasource.py index ab9298473a28..5bb45ccfd235 100644 --- a/tests/datasource/fluent/test_pandas_datasource.py +++ b/tests/datasource/fluent/test_pandas_datasource.py @@ -7,11 +7,11 @@ from pprint import pformat as pf from typing import TYPE_CHECKING, Any, Callable, Type -import pydantic import pytest from pytest import MonkeyPatch, param import great_expectations.execution_engine.pandas_execution_engine +from great_expectations.compatibility import pydantic from great_expectations.datasource.fluent import PandasDatasource from great_expectations.datasource.fluent.dynamic_pandas import PANDAS_VERSION from great_expectations.datasource.fluent.pandas_datasource import ( @@ -48,7 +48,7 @@ @pytest.fixture def pandas_datasource() -> PandasDatasource: - return PandasDatasource( # type: ignore[call-arg] # type field not required + return PandasDatasource( name="pandas_datasource", ) @@ -149,7 +149,7 @@ def test_add_asset_method_exists_and_is_functional( assert method_name in PandasDatasource.__dict__ - ds = PandasDatasource( # type: ignore[call-arg] # type field not required + ds = PandasDatasource( name="ds_for_testing_add_asset_methods", ) method = getattr(ds, method_name) @@ -175,7 +175,7 @@ def test_add_asset_method_signature(self, asset_class: Type[_PandasDataAsset]): type_name: str = _get_field_details(asset_class, "type").default_value method_name: str = f"add_{type_name}_asset" - ds = PandasDatasource( # type: ignore[call-arg] # type field not required + ds = PandasDatasource( name="ds_for_testing_add_asset_methods", ) method = getattr(ds, method_name) diff --git a/tests/datasource/fluent/test_pandas_dbfs_datasource.py b/tests/datasource/fluent/test_pandas_dbfs_datasource.py index 3dce016a50b0..eab6387e1803 100644 --- a/tests/datasource/fluent/test_pandas_dbfs_datasource.py +++ b/tests/datasource/fluent/test_pandas_dbfs_datasource.py @@ -77,7 +77,7 @@ def pandas_dbfs_datasource( ], ) - pandas_dbfs_datasource = PandasDBFSDatasource( # type: ignore[call-arg] + pandas_dbfs_datasource = PandasDBFSDatasource( name="pandas_dbfs_datasource", base_directory=pathlib.Path(base_directory), ) diff --git a/tests/datasource/fluent/test_pandas_filesystem_datasource.py b/tests/datasource/fluent/test_pandas_filesystem_datasource.py index 0bf9599d0053..cc2e34dadfe1 100644 --- a/tests/datasource/fluent/test_pandas_filesystem_datasource.py +++ b/tests/datasource/fluent/test_pandas_filesystem_datasource.py @@ -9,12 +9,12 @@ from pprint import pformat as pf from typing import TYPE_CHECKING, Any, Optional, Type -import pydantic import pytest from pytest import MonkeyPatch, param import great_expectations.exceptions as ge_exceptions import great_expectations.execution_engine.pandas_execution_engine +from great_expectations.compatibility import pydantic from great_expectations.datasource.fluent import PandasFilesystemDatasource from great_expectations.datasource.fluent.data_asset.data_connector import ( FilesystemDataConnector, @@ -57,7 +57,7 @@ def pandas_filesystem_datasource(empty_data_context) -> PandasFilesystemDatasour .parent.joinpath(base_directory_rel_path) .resolve(strict=True) ) - pandas_filesystem_datasource = PandasFilesystemDatasource( # type: ignore[call-arg] + pandas_filesystem_datasource = PandasFilesystemDatasource( name="pandas_filesystem_datasource", base_directory=base_directory_abs_path, ) @@ 
@@ -171,7 +171,7 @@ def test_add_asset_method_exists_and_is_functional(
 
         assert method_name in PandasFilesystemDatasource.__dict__
 
-        ds = PandasFilesystemDatasource(  # type: ignore[call-arg]
+        ds = PandasFilesystemDatasource(
             name="ds_for_testing_add_asset_methods",
             base_directory=pathlib.Path.cwd(),
         )
@@ -191,7 +191,7 @@ def test_add_asset_method_signature(self, asset_class: Type[_FilePathDataAsset])
         type_name: str = _get_field_details(asset_class, "type").default_value
         method_name: str = f"add_{type_name}_asset"
 
-        ds = PandasFilesystemDatasource(  # type: ignore[call-arg]
+        ds = PandasFilesystemDatasource(
             name="ds_for_testing_add_asset_methods",
             base_directory=pathlib.Path.cwd(),
         )
diff --git a/tests/datasource/fluent/test_pandas_google_cloud_storage_datasource.py b/tests/datasource/fluent/test_pandas_google_cloud_storage_datasource.py
index f0323b91c25b..f781b873f496 100644
--- a/tests/datasource/fluent/test_pandas_google_cloud_storage_datasource.py
+++ b/tests/datasource/fluent/test_pandas_google_cloud_storage_datasource.py
@@ -62,7 +62,7 @@ def _build_pandas_gcs_datasource(
     gcs_options: Dict[str, Any] | None = None
 ) -> PandasGoogleCloudStorageDatasource:
     gcs_client: google.Client = cast(google.Client, MockGCSClient())
-    pandas_gcs_datasource = PandasGoogleCloudStorageDatasource(  # type: ignore[call-arg]
+    pandas_gcs_datasource = PandasGoogleCloudStorageDatasource(
         name="pandas_gcs_datasource",
         bucket_or_name="test_bucket",
         gcs_options=gcs_options or {},
diff --git a/tests/datasource/fluent/test_pandas_s3_datasource.py b/tests/datasource/fluent/test_pandas_s3_datasource.py
index b2f5a4d4be6a..391cd57e30d8 100644
--- a/tests/datasource/fluent/test_pandas_s3_datasource.py
+++ b/tests/datasource/fluent/test_pandas_s3_datasource.py
@@ -7,13 +7,12 @@
 from typing import TYPE_CHECKING, List, cast
 
 import pandas as pd
-import pydantic
 import pytest
 from moto import mock_s3
 from pytest import param
 
 import great_expectations.exceptions as ge_exceptions
-from great_expectations.compatibility import aws
+from great_expectations.compatibility import aws, pydantic
 from great_expectations.core.util import S3Url
 from great_expectations.datasource.fluent import PandasS3Datasource
 from great_expectations.datasource.fluent.data_asset.data_connector import (
@@ -109,7 +108,7 @@ def pandas_s3_datasource(
             Key=key,
         )
 
-    pandas_s3_datasource = PandasS3Datasource(  # type: ignore[call-arg]
+    pandas_s3_datasource = PandasS3Datasource(
         name="pandas_s3_datasource",
         bucket=s3_bucket,
     )
diff --git a/tests/datasource/fluent/test_postgres_datasource.py b/tests/datasource/fluent/test_postgres_datasource.py
index 5cce7f109f5e..0be980b7f569 100644
--- a/tests/datasource/fluent/test_postgres_datasource.py
+++ b/tests/datasource/fluent/test_postgres_datasource.py
@@ -19,9 +19,9 @@
 )
 
 import pytest
-from pydantic import ValidationError
 
 import great_expectations.exceptions as ge_exceptions
+from great_expectations.compatibility.pydantic import ValidationError
 from great_expectations.core.batch_spec import SqlAlchemyDatasourceBatchSpec
 from great_expectations.core.yaml_handler import YAMLHandler
 from great_expectations.data_context.data_context.file_data_context import (
@@ -90,7 +90,7 @@ def _source(
     PostgresDatasource.execution_engine_override = execution_eng_cls  # type: ignore[misc]
     postgres_datasource = PostgresDatasource(
         name="my_datasource",
-        connection_string=connection_string,  # type: ignore[arg-type] # coerced
+        connection_string=connection_string,
         create_temp_table=create_temp_table,
     )
     if data_context:
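
Note: the `# type: ignore[arg-type] # coerced` / `# pydantic will coerce` comments removed in the hunks above leaned on pydantic v1's runtime coercion of plain strings into URL/DSN-typed fields; with the types routed through the compatibility shim, the ignores are no longer needed while the runtime behavior stays the same. A minimal sketch of that coercion, using a hypothetical `_Config` model that is not part of this codebase:

# Sketch only: under pydantic v1 semantics, a str assigned to an AnyUrl-typed
# field is validated and coerced, so callers may pass plain strings.
from great_expectations.compatibility import pydantic


class _Config(pydantic.BaseModel):  # hypothetical model, for illustration
    url: pydantic.AnyUrl


cfg = _Config(url="postgresql://user:secret@localhost:5432/db")
assert isinstance(cfg.url, pydantic.AnyUrl)  # coerced from str during validation
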
diff --git a/tests/datasource/fluent/test_snowflake_datasource.py b/tests/datasource/fluent/test_snowflake_datasource.py
index 7affa5757036..23812b29ed9f 100644
--- a/tests/datasource/fluent/test_snowflake_datasource.py
+++ b/tests/datasource/fluent/test_snowflake_datasource.py
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
-import pydantic
 import pytest
 
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.snowflake import snowflake
 from great_expectations.datasource.fluent.config_str import ConfigStr
 from great_expectations.datasource.fluent.snowflake_datasource import (
@@ -108,7 +108,9 @@ def test_invalid_connection_string_raises_dsn_error(
     connection_string: str, expected_errors: list[dict]
 ):
     with pytest.raises(pydantic.ValidationError) as exc_info:
-        _ = SnowflakeDatasource(name="my_snowflake", connection_string=connection_string)  # type: ignore[arg-type] # Pydantic coerces connection_string to SnowflakeDsn
+        _ = SnowflakeDatasource(
+            name="my_snowflake", connection_string=connection_string
+        )
 
     assert expected_errors == exc_info.value.errors()
diff --git a/tests/datasource/fluent/test_spark_azure_blob_storage_datasource.py b/tests/datasource/fluent/test_spark_azure_blob_storage_datasource.py
index 6890abb49cee..495793058206 100644
--- a/tests/datasource/fluent/test_spark_azure_blob_storage_datasource.py
+++ b/tests/datasource/fluent/test_spark_azure_blob_storage_datasource.py
@@ -236,7 +236,7 @@ def test_construct_csv_asset_directly(
     mock_azure_client, mock_list_keys, object_keys: List[str]
 ):
     mock_list_keys.return_value = object_keys
-    asset = CSVAsset(
+    asset = CSVAsset(  # type: ignore[call-arg] # missing args
         name="csv_asset",
         batching_regex=r"(.+)_(.+)_(\d{4})\.csv",  # type: ignore[arg-type]
     )
@@ -419,7 +419,7 @@ def test_test_connection_failures(
     bad_regex_config: tuple[re.Pattern, str],
 ):
     regex, test_connection_error_message = bad_regex_config
-    csv_asset = CSVAsset(
+    csv_asset = CSVAsset(  # type: ignore[call-arg] # missing args
         name="csv_asset",
         batching_regex=regex,
     )
diff --git a/tests/datasource/fluent/test_spark_datasource.py b/tests/datasource/fluent/test_spark_datasource.py
index 71a7e7baae45..089f8ab4d9c9 100644
--- a/tests/datasource/fluent/test_spark_datasource.py
+++ b/tests/datasource/fluent/test_spark_datasource.py
@@ -6,9 +6,9 @@
 import pathlib
 from typing import TYPE_CHECKING
 
-import pydantic
 import pytest
 
+from great_expectations.compatibility import pydantic
 from great_expectations.datasource.fluent.spark_datasource import (
     DataFrameAsset,
     SparkConfig,
diff --git a/tests/datasource/fluent/test_spark_dbfs_datasource.py b/tests/datasource/fluent/test_spark_dbfs_datasource.py
index 7b0cb50ab808..aad9c0c1a059 100644
--- a/tests/datasource/fluent/test_spark_dbfs_datasource.py
+++ b/tests/datasource/fluent/test_spark_dbfs_datasource.py
@@ -65,7 +65,7 @@ def spark_dbfs_datasource(fs: FakeFilesystem, test_backends) -> SparkDBFSDatasou
         ],
     )
 
-    return SparkDBFSDatasource(  # type: ignore[call-arg]
+    return SparkDBFSDatasource(
         name="spark_dbfs_datasource",
         base_directory=pathlib.Path(base_directory),
     )
@@ -178,7 +178,7 @@ def test_test_connection_failures(
     bad_regex_config: tuple[re.Pattern, str],
 ):
     regex, test_connection_error_message = bad_regex_config
-    csv_asset = CSVAsset(
+    csv_asset = CSVAsset(  # type: ignore[call-arg] # missing args
         name="csv_asset",
         batching_regex=regex,
     )
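
Note: the `pytest.raises(pydantic.ValidationError)` tests reworked above all follow one pattern: construct the model with bad input, then assert against `exc_info.value.errors()`. A self-contained sketch of that pattern with a hypothetical `_Dummy` model, assuming pydantic v1 error shapes:

# Sketch of the negative-test pattern used throughout these files;
# `_Dummy` is a hypothetical model, not part of the codebase.
import pytest

from great_expectations.compatibility import pydantic


class _Dummy(pydantic.BaseModel):
    name: str
    port: int


def test_bad_port_raises_validation_error():
    with pytest.raises(pydantic.ValidationError) as exc_info:
        _Dummy(name="x", port="not-a-number")
    # pydantic v1 reports the offending field in the "loc" tuple
    assert exc_info.value.errors()[0]["loc"] == ("port",)
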
diff --git a/tests/datasource/fluent/test_spark_filesystem_datasource.py b/tests/datasource/fluent/test_spark_filesystem_datasource.py
index 231db4c9a4e2..dd69a9276fd5 100644
--- a/tests/datasource/fluent/test_spark_filesystem_datasource.py
+++ b/tests/datasource/fluent/test_spark_filesystem_datasource.py
@@ -7,11 +7,11 @@
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, List, cast
 
-import pydantic
 import pytest
 
 import great_expectations.exceptions as ge_exceptions
 from great_expectations.alias_types import PathStr
+from great_expectations.compatibility import pydantic
 from great_expectations.compatibility.pyspark import functions as F
 from great_expectations.compatibility.pyspark import types as pyspark_types
 from great_expectations.datasource.fluent.data_asset.data_connector import (
@@ -1042,7 +1042,7 @@ def datasource_test_connection_error_messages(
     request,
 ) -> tuple[SparkFilesystemDatasource, TestConnectionError]:
     batching_regex, test_connection_error = request.param(csv_path=csv_path)
-    csv_asset = CSVAsset(
+    csv_asset = CSVAsset(  # type: ignore[call-arg] # missing args
         name="csv_asset",
         batching_regex=batching_regex,
     )
diff --git a/tests/datasource/fluent/test_spark_google_cloud_storage_datasource.py b/tests/datasource/fluent/test_spark_google_cloud_storage_datasource.py
index 91daaa45971a..a34269bb4864 100644
--- a/tests/datasource/fluent/test_spark_google_cloud_storage_datasource.py
+++ b/tests/datasource/fluent/test_spark_google_cloud_storage_datasource.py
@@ -215,7 +215,7 @@ def test_construct_csv_asset_directly(
     mock_gcs_client, mock_list_keys, object_keys: List[str]
 ):
     mock_list_keys.return_value = object_keys
-    asset = CSVAsset(
+    asset = CSVAsset(  # type: ignore[call-arg] # missing args
         name="csv_asset",
         batching_regex=r"(.+)_(.+)_(\d{4})\.csv",  # type: ignore[arg-type]
     )
@@ -391,7 +391,7 @@ def test_test_connection_failures(
     bad_regex_config: tuple[re.Pattern, str],
 ):
     regex, test_connection_error_message = bad_regex_config
-    csv_asset = CSVAsset(
+    csv_asset = CSVAsset(  # type: ignore[call-arg] # missing args
         name="csv_asset",
         batching_regex=regex,
     )
diff --git a/tests/datasource/fluent/test_spark_s3_datasource.py b/tests/datasource/fluent/test_spark_s3_datasource.py
index 6ea18d2eb45d..a27943f8b5d6 100644
--- a/tests/datasource/fluent/test_spark_s3_datasource.py
+++ b/tests/datasource/fluent/test_spark_s3_datasource.py
@@ -281,7 +281,7 @@ def test_test_connection_failures(
     bad_regex_config: tuple[re.Pattern, str],
 ):
     regex, test_connection_error_message = bad_regex_config
-    csv_asset = CSVAsset(
+    csv_asset = CSVAsset(  # type: ignore[call-arg] # missing args
         name="csv_asset",
         batching_regex=regex,
     )
diff --git a/tests/datasource/fluent/test_sqlite_datasource.py b/tests/datasource/fluent/test_sqlite_datasource.py
index 4f440e3e428c..6887cd183e60 100644
--- a/tests/datasource/fluent/test_sqlite_datasource.py
+++ b/tests/datasource/fluent/test_sqlite_datasource.py
@@ -5,8 +5,8 @@
 from typing import TYPE_CHECKING, Any, Callable, Generator, Optional
 
 import pytest
-from pydantic import ValidationError
 
+from great_expectations.compatibility.pydantic import ValidationError
 from great_expectations.datasource.fluent import SqliteDatasource
 from tests.datasource.fluent.conftest import sqlachemy_execution_engine_mock_cls
 
@@ -39,7 +39,7 @@ def sqlite_datasource(
     connection_string = f"sqlite:///{sqlite_database_path}"
     return SqliteDatasource(
         name=sqlite_datasource_name,
-        connection_string=connection_string,  # type: ignore[arg-type] # pydantic will coerce
+        connection_string=connection_string,
     )
 
 
@@ -100,7 +100,7 @@ def _create_sqlite_source(
     SqliteDatasource.execution_engine_override = execution_eng_cls  # type: ignore[misc]
     sqlite_datasource = SqliteDatasource(
         name="sqlite_datasource",
-        connection_string="sqlite://",  # type: ignore[arg-type] # pydantic will coerce
+        connection_string="sqlite://",
         create_temp_table=create_temp_table,
     )
     if data_context:
diff --git a/tests/render/test_renderer_configuration.py b/tests/render/test_renderer_configuration.py
index d6a222540b8a..2d47c7fc66b6 100644
--- a/tests/render/test_renderer_configuration.py
+++ b/tests/render/test_renderer_configuration.py
@@ -1,8 +1,10 @@
 from typing import Union
 
 import pytest
-from pydantic.error_wrappers import ValidationError
 
+from great_expectations.compatibility.pydantic import (
+    error_wrappers as pydantic_error_wrappers,
+)
 from great_expectations.core import (
     ExpectationConfiguration,
     ExpectationValidationResult,
@@ -96,7 +98,7 @@ def test_successful_renderer_configuration_instantiation(
     strict=True,
 )
 def test_failed_renderer_configuration_instantiation():
-    with pytest.raises(ValidationError) as e:
+    with pytest.raises(pydantic_error_wrappers.ValidationError) as e:
         RendererConfiguration(
             runtime_configuration={},
         )
@@ -134,7 +136,7 @@ def test_renderer_configuration_add_param_validation(
     renderer_configuration = RendererConfiguration(
         configuration=expectation_configuration
     )
-    with pytest.raises(ValidationError) as e:
+    with pytest.raises(pydantic_error_wrappers.ValidationError) as e:
         renderer_configuration.add_param(name="value", param_type=param_type)
 
     if param_type is RendererValueType.STRING:
diff --git a/tests/test_packaging.py b/tests/test_packaging.py
index f3dbd0758d44..71db1594ad08 100644
--- a/tests/test_packaging.py
+++ b/tests/test_packaging.py
@@ -185,7 +185,7 @@ def test_polish_and_ratchet_pins_and_upper_bounds():
     )
 
     # Polish and ratchet this number down as low as possible
-    assert len(sorted_packages_with_pins_or_upper_bounds) == 77
+    assert len(sorted_packages_with_pins_or_upper_bounds) == 74
     assert sorted_packages_with_pins_or_upper_bounds == [
         ("requirements-dev-api-docs-test.txt", "docstring-parser", (("==", "0.15"),)),
         ("requirements-dev-athena.txt", "pyathena", (("<", "3"), (">=", "2.0.0"))),
@@ -240,7 +240,6 @@ def test_polish_and_ratchet_pins_and_upper_bounds():
         ("requirements-dev.txt", "mypy", (("==", "1.5.1"),)),
         ("requirements-dev.txt", "pika", (("==", "1.3.1"),)),
         ("requirements-dev.txt", "pyathena", (("<", "3"), (">=", "2.0.0"))),
-        ("requirements-dev.txt", "pydantic", (("<", "2.0"), (">=", "1.9.2"))),
         ("requirements-dev.txt", "pypd", (("==", "1.1.0"),)),
         ("requirements-dev.txt", "ruamel.yaml", (("<", "0.17.18"), (">=", "0.16"))),
         ("requirements-dev.txt", "ruff", (("==", "0.0.284"),)),
@@ -260,7 +259,6 @@ def test_polish_and_ratchet_pins_and_upper_bounds():
         ("requirements-types.txt", "mypy", (("==", "1.5.1"),)),
         ("requirements-types.txt", "pika", (("==", "1.3.1"),)),
         ("requirements-types.txt", "pyathena", (("<", "3"), (">=", "2.0.0"))),
-        ("requirements-types.txt", "pydantic", (("<", "2.0"), (">=", "1.9.2"))),
         ("requirements-types.txt", "ruamel.yaml", (("<", "0.17.18"), (">=", "0.16"))),
         ("requirements-types.txt", "ruff", (("==", "0.0.284"),)),
         ("requirements-types.txt", "snapshottest", (("==", "0.6.0"),)),
@@ -270,6 +268,5 @@ def test_polish_and_ratchet_pins_and_upper_bounds():
         ("requirements.txt", "altair", (("<", "5.0.0"), (">=", "4.2.1"))),
         ("requirements.txt", "makefun", (("<", "2"), (">=", "1.7.0"))),
         ("requirements.txt", "marshmallow", (("<", "4.0.0"), (">=", "3.7.1"))),
-        ("requirements.txt", "pydantic", (("<", "2.0"), (">=", "1.9.2"))),
("requirements.txt", "ruamel.yaml", (("<", "0.17.18"), (">=", "0.16"))), ]